gcc/tree-cfg.c
/* Control flow functions for trees.
   Copyright (C) 2001-2021 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "trans-mem.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "cfganal.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-manip.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "except.h"
#include "cfgloop.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "tree-inline.h"
#include "tree-ssa-live.h"
#include "omp-general.h"
#include "omp-expand.h"
#include "tree-cfgcleanup.h"
#include "gimplify.h"
#include "attribs.h"
#include "selftest.h"
#include "opts.h"
#include "asan.h"
#include "profile.h"

/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */
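/* For instance (a sketch): given

     switch (x_1) { case 1: case 2: goto <L1>; default: goto <L0>; }

   both case labels reach the block holding L1 over the same edge, so the
   table maps that edge to the chain "case 1 -> case 2" through their
   CASE_CHAIN fields; redirecting the edge can then update both cases
   without rescanning the whole case vector.  */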
static hash_map<edge, tree> *edge_to_cases;

/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */

static bitmap touched_switch_bbs;

/* OpenMP region idxs for blocks during cfg pass.  */
static vec<int> bb_to_omp_idx;

/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Data to pass to replace_block_vars_by_duplicates_1.  */
struct replace_decls_d
{
  hash_map<tree, tree> *vars_map;
  tree to_context;
};

/* Hash table to store last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  int location_line;
  int discriminator;
};

/* Hashtable helpers.  */

struct locus_discrim_hasher : free_ptr_hash <locus_discrim_map>
{
  static inline hashval_t hash (const locus_discrim_map *);
  static inline bool equal (const locus_discrim_map *,
			    const locus_discrim_map *);
};

/* Trivial hash function for a location_t.  ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator.  */

inline hashval_t
locus_discrim_hasher::hash (const locus_discrim_map *item)
{
  return item->location_line;
}

/* Equality function for the locus-to-discriminator map.  A and B
   point to the two hash table entries to compare.  */

inline bool
locus_discrim_hasher::equal (const locus_discrim_map *a,
			     const locus_discrim_map *b)
{
  return a->location_line == b->location_line;
}

static hash_table<locus_discrim_hasher> *discriminator_per_locus;

/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);

/* Edges.  */
static void make_edges (void);
static void assign_discriminators (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (gswitch *, basic_block);
static bool make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple *, gimple *);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static gimple *first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gtransaction *);
static bool call_can_make_abnormal_goto (gimple *);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (const gcond *, tree);

void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_fn (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
  vec_safe_grow_cleared (basic_block_info_for_fn (fn),
			 initial_cfg_capacity, true);

  /* Build a mapping of labels to their associated blocks.  */
  vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
			 initial_cfg_capacity, true);

  SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
  SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));

  ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FN (fn);
  EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FN (fn);
}

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}

/*---------------------------------------------------------------------------
			      Create basic blocks
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  make_blocks (seq);

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Adjust the size of the array.  */
  if (basic_block_info_for_fn (cfun)->length ()
      < (size_t) n_basic_blocks_for_fn (cfun))
    vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
			   n_basic_blocks_for_fn (cfun));

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
  make_edges ();
  assign_discriminators ();
  cleanup_dead_labels ();
  delete discriminator_per_locus;
  discriminator_per_locus = NULL;
}

/* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
   them and propagate the information to LOOP.  We assume that the annotations
   come immediately before the condition in BB, if any.  */
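/* For example (a sketch with hypothetical SSA names): for
   "#pragma GCC unroll 4" the gimplifier leaves

     _2 = .ANNOTATE (_1, annot_expr_unroll_kind, 4);
     if (_2 != 0) ...

   just before the loop condition; we record loop->unroll = 4 and rewrite
   the call into the plain copy "_2 = _1;".  */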
static void
replace_loop_annotate_in_block (basic_block bb, class loop *loop)
{
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  gimple *stmt = gsi_stmt (gsi);

  if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
    return;

  for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_CALL)
	break;
      if (!gimple_call_internal_p (stmt)
	  || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	break;

      switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	{
	case annot_expr_ivdep_kind:
	  loop->safelen = INT_MAX;
	  break;
	case annot_expr_unroll_kind:
	  loop->unroll
	    = (unsigned short) tree_to_shwi (gimple_call_arg (stmt, 2));
	  cfun->has_unroll = true;
	  break;
	case annot_expr_no_vector_kind:
	  loop->dont_vectorize = true;
	  break;
	case annot_expr_vector_kind:
	  loop->force_vectorize = true;
	  cfun->has_force_vectorize_loops = true;
	  break;
	case annot_expr_parallel_kind:
	  loop->can_be_parallel = true;
	  loop->safelen = INT_MAX;
	  break;
	default:
	  gcc_unreachable ();
	}

      stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  gimple_call_arg (stmt, 0));
      gsi_replace (&gsi, stmt, true);
    }
}

/* Look for ANNOTATE calls with loop annotation kind; if found, remove
   them and propagate the information to the loop.  We assume that the
   annotations come immediately before the condition of the loop.  */

static void
replace_loop_annotate (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple *stmt;

  for (auto loop : loops_list (cfun, 0))
    {
      /* First look into the header.  */
      replace_loop_annotate_in_block (loop->header, loop);

      /* Then look into the latch, if any.  */
      if (loop->latch)
	replace_loop_annotate_in_block (loop->latch, loop);

      /* Push the global flag_finite_loops state down to individual loops.  */
      loop->finite_p = flag_finite_loops;
    }

  /* Remove IFN_ANNOTATE.  Safeguard for the case loop->latch == NULL.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
	{
	  stmt = gsi_stmt (gsi);
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    continue;
	  if (!gimple_call_internal_p (stmt)
	      || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	    continue;

	  switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	    {
	    case annot_expr_ivdep_kind:
	    case annot_expr_unroll_kind:
	    case annot_expr_no_vector_kind:
	    case annot_expr_vector_kind:
	    case annot_expr_parallel_kind:
	      break;
	    default:
	      gcc_unreachable ();
	    }

	  warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
	  stmt = gimple_build_assign (gimple_call_lhs (stmt),
				      gimple_call_arg (stmt, 0));
	  gsi_replace (&gsi, stmt, true);
	}
    }
}

static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  cleanup_tree_cfg ();

  bb_to_omp_idx.release ();

  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  replace_loop_annotate ();
  return 0;
}

namespace {

const pass_data pass_data_build_cfg =
{
  GIMPLE_PASS, /* type */
  "cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CFG, /* tv_id */
  PROP_gimple_leh, /* properties_required */
  ( PROP_cfg | PROP_loops ), /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_build_cfg : public gimple_opt_pass
{
public:
  pass_build_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_cfg, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_build_cfg (); }

}; // class pass_build_cfg

} // anon namespace

gimple_opt_pass *
make_pass_build_cfg (gcc::context *ctxt)
{
  return new pass_build_cfg (ctxt);
}


/* Return true if T is a computed goto.  */
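/* E.g. "goto *p_1;" is a computed goto, while "goto <D.1234>;" is not,
   since its destination is a LABEL_DECL.  */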
bool
computed_goto_p (gimple *t)
{
  return (gimple_code (t) == GIMPLE_GOTO
	  && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
}

/* Returns true if the sequence of statements STMTS only contains
   a call to __builtin_unreachable ().  */

bool
gimple_seq_unreachable_p (gimple_seq stmts)
{
  if (stmts == NULL
      /* Return false if -fsanitize=unreachable, we don't want to
	 optimize away those calls, but rather turn them into
	 __ubsan_handle_builtin_unreachable () or __builtin_trap ()
	 later.  */
      || sanitize_flags_p (SANITIZE_UNREACHABLE))
    return false;

  gimple_stmt_iterator gsi = gsi_last (stmts);

  if (!gimple_call_builtin_p (gsi_stmt (gsi), BUILT_IN_UNREACHABLE))
    return false;

  for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL
	  && !is_gimple_debug (stmt)
	  && !gimple_clobber_p (stmt))
	return false;
    }
  return true;
}

/* Returns true for edge E where e->src ends with a GIMPLE_COND and
   the other edge points to a bb with just __builtin_unreachable ().
   I.e. return true for C->M edge in:
   <bb C>:
   ...
   if (something)
     goto <bb N>;
   else
     goto <bb M>;
   <bb N>:
   __builtin_unreachable ();
   <bb M>:  */

bool
assert_unreachable_fallthru_edge_p (edge e)
{
  basic_block pred_bb = e->src;
  gimple *last = last_stmt (pred_bb);
  if (last && gimple_code (last) == GIMPLE_COND)
    {
      basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
      if (other_bb == e->dest)
	other_bb = EDGE_SUCC (pred_bb, 1)->dest;
      if (EDGE_COUNT (other_bb->succs) == 0)
	return gimple_seq_unreachable_p (bb_seq (other_bb));
    }
  return false;
}


/* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
   could alter control flow except via eh.  We initialize the flag at
   CFG build time and only ever clear it later.  */

static void
gimple_call_initialize_ctrl_altering (gimple *stmt)
{
  int flags = gimple_call_flags (stmt);

  /* A call alters control flow if it can make an abnormal goto.  */
  if (call_can_make_abnormal_goto (stmt)
      /* A call also alters control flow if it does not return.  */
      || flags & ECF_NORETURN
      /* TM ending statements have backedges out of the transaction.
	 Return true so we split the basic block containing them.
	 Note that the TM_BUILTIN test is merely an optimization.  */
      || ((flags & ECF_TM_BUILTIN)
	  && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
      /* BUILT_IN_RETURN call is same as return statement.  */
      || gimple_call_builtin_p (stmt, BUILT_IN_RETURN)
      /* IFN_UNIQUE should be the last insn, to make checking for it
	 as cheap as possible.  */
      || (gimple_call_internal_p (stmt)
	  && gimple_call_internal_unique_p (stmt)))
    gimple_call_set_ctrl_altering (stmt, true);
  else
    gimple_call_set_ctrl_altering (stmt, false);
}


/* Insert SEQ after BB and build a flowgraph.  */

static basic_block
make_blocks_1 (gimple_seq seq, basic_block bb)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple *stmt = NULL;
  gimple *prev_stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;

  while (!gsi_end_p (i))
    {
      /* PREV_STMT should only be set to a debug stmt if the debug
	 stmt is before nondebug stmts.  Once stmt reaches a nondebug
	 nonlabel, prev_stmt will be set to it, so that
	 stmt_starts_bb_p will know to start a new block if a label is
	 found.  However, if stmt was a label after debug stmts only,
	 keep the label in prev_stmt even if we find further debug
	 stmts, for there may be other labels after them, and they
	 should land in the same block.  */
      if (!prev_stmt || !stmt || !is_gimple_debug (stmt))
	prev_stmt = stmt;
      stmt = gsi_stmt (i);

      if (stmt && is_gimple_call (stmt))
	gimple_call_initialize_ctrl_altering (stmt);

      /* If the statement starts a new basic block or if we have determined
	 in a previous pass that we need to create a new block for STMT, do
	 so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
	{
	  if (!first_stmt_of_seq)
	    gsi_split_seq_before (&i, &seq);
	  bb = create_basic_block (seq, bb);
	  start_new_block = false;
	  prev_stmt = NULL;
	}

      /* Now add STMT to BB and create the subgraphs for special statement
	 codes.  */
      gimple_set_bb (stmt, bb);

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
	 next iteration.  */
      if (stmt_ends_bb_p (stmt))
	{
	  /* If the stmt can make abnormal goto use a new temporary
	     for the assignment to the LHS.  This makes sure the old value
	     of the LHS is available on the abnormal edge.  Otherwise
	     we will end up with overlapping life-ranges for abnormal
	     SSA names.  */
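	  /* For instance (a sketch): "res = setjmp (buf);" becomes
	     "tmp = setjmp (buf);" ending this block, with the copy
	     "res = tmp;" landing in the fallthru block below, so the
	     abnormal edge out of the call still sees the old RES.  */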
	  if (gimple_has_lhs (stmt)
	      && stmt_can_make_abnormal_goto (stmt)
	      && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
	    {
	      tree lhs = gimple_get_lhs (stmt);
	      tree tmp = create_tmp_var (TREE_TYPE (lhs));
	      gimple *s = gimple_build_assign (lhs, tmp);
	      gimple_set_location (s, gimple_location (stmt));
	      gimple_set_block (s, gimple_block (stmt));
	      gimple_set_lhs (stmt, tmp);
	      gsi_insert_after (&i, s, GSI_SAME_STMT);
	    }
	  start_new_block = true;
	}

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
  return bb;
}

/* Build a flowgraph for the sequence of stmts SEQ.  */

static void
make_blocks (gimple_seq seq)
{
  /* Look for debug markers right before labels, and move the debug
     stmts after the labels.  Accepting labels among debug markers
     adds no value, just complexity; if we wanted to annotate labels
     with view numbers (so sequencing among markers would matter) or
     somesuch, we're probably better off still moving the labels, but
     adding other debug annotations in their original positions or
     emitting nonbind or bind markers associated with the labels in
     the original position of the labels.

     Moving labels would probably be simpler, but we can't do that:
     moving labels assigns label ids to them, and doing so because of
     debug markers makes for -fcompare-debug and possibly even codegen
     differences.  So, we have to move the debug stmts instead.  To
     that end, we scan SEQ backwards, marking the position of the
     latest (earliest we find) label, and moving debug stmts that are
     not separated from it by nondebug nonlabel stmts after the
     label.  */
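  /* A sketch of the transformation:

       # DEBUG BEGIN_STMT          L1:
       L1:              becomes    # DEBUG BEGIN_STMT
       x = 1;                      x = 1;

     so the label stays first in its block and its position does not
     depend on whether debug markers are being emitted.  */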
  if (MAY_HAVE_DEBUG_MARKER_STMTS)
    {
      gimple_stmt_iterator label = gsi_none ();

      for (gimple_stmt_iterator i = gsi_last (seq); !gsi_end_p (i); gsi_prev (&i))
	{
	  gimple *stmt = gsi_stmt (i);

	  /* If this is the first label we encounter (latest in SEQ)
	     before nondebug stmts, record its position.  */
	  if (is_a <glabel *> (stmt))
	    {
	      if (gsi_end_p (label))
		label = i;
	      continue;
	    }

	  /* Without a recorded label position to move debug stmts to,
	     there's nothing to do.  */
	  if (gsi_end_p (label))
	    continue;

	  /* Move the debug stmt at I after LABEL.  */
	  if (is_gimple_debug (stmt))
	    {
	      gcc_assert (gimple_debug_nonbind_marker_p (stmt));
	      /* As STMT is removed, I advances to the stmt after
		 STMT, so the gsi_prev in the for "increment"
		 expression gets us to the stmt we're to visit after
		 STMT.  LABEL, however, would advance to the moved
		 stmt if we passed it to gsi_move_after, so pass it a
		 copy instead, so as to keep LABEL pointing to the
		 LABEL.  */
	      gimple_stmt_iterator copy = label;
	      gsi_move_after (&i, &copy);
	      continue;
	    }

	  /* There aren't any (more?) debug stmts before label, so
	     there isn't anything else to move after it.  */
	  label = gsi_none ();
	}
    }

  make_blocks_1 (seq, ENTRY_BLOCK_PTR_FOR_FN (cfun));
}

/* Create and return a new empty basic block after bb AFTER.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block_for_fn (cfun);
  bb->flags = BB_NEW;
  set_bb_seq (bb, h ? (gimple_seq) h : NULL);

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block_for_fn (cfun)
      == basic_block_info_for_fn (cfun)->length ())
    vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
			   last_basic_block_for_fn (cfun) + 1);

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);

  n_basic_blocks_for_fn (cfun)++;
  last_basic_block_for_fn (cfun)++;

  return bb;
}


/*---------------------------------------------------------------------------
				 Edge creation
---------------------------------------------------------------------------*/

/* If basic block BB has an abnormal edge to a basic block
   containing an IFN_ABNORMAL_DISPATCHER internal call, return
   the dispatcher's basic block, otherwise return NULL.  */

basic_block
get_abnormal_succ_dispatcher (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
      {
	gimple_stmt_iterator gsi
	  = gsi_start_nondebug_after_labels_bb (e->dest);
	gimple *g = gsi_stmt (gsi);
	if (g && gimple_call_internal_p (g, IFN_ABNORMAL_DISPATCHER))
	  return e->dest;
      }
  return NULL;
}

/* Helper function for make_edges.  Create a basic block with an
   ABNORMAL_DISPATCHER internal call in it if needed, and
   create abnormal edges from BBS to it and from it to FOR_BB
   if COMPUTED_GOTO is false, otherwise factor the computed gotos.  */

static void
handle_abnormal_edges (basic_block *dispatcher_bbs, basic_block for_bb,
		       auto_vec<basic_block> *bbs, bool computed_goto)
{
  basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
  unsigned int idx = 0;
  basic_block bb;
  bool inner = false;

  if (!bb_to_omp_idx.is_empty ())
    {
      dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
      if (bb_to_omp_idx[for_bb->index] != 0)
	inner = true;
    }

  /* If the dispatcher has been created already, then there are basic
     blocks with abnormal edges to it, so just make a new edge to
     for_bb.  */
  if (*dispatcher == NULL)
    {
      /* Check if there are any basic blocks that need to have
	 abnormal edges to this dispatcher.  If there are none, return
	 early.  */
      if (bb_to_omp_idx.is_empty ())
	{
	  if (bbs->is_empty ())
	    return;
	}
      else
	{
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
	      break;
	  if (bb == NULL)
	    return;
	}

      /* Create the dispatcher bb.  */
      *dispatcher = create_basic_block (NULL, for_bb);
      if (computed_goto)
	{
	  /* Factor computed gotos into a common computed goto site.  Also
	     record the location of that site so that we can un-factor the
	     gotos after we have converted back to normal form.  */
	  gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);

	  /* Create the destination of the factored goto.  Each original
	     computed goto will put its desired destination into this
	     variable and jump to the label we create immediately below.  */
	  tree var = create_tmp_var (ptr_type_node, "gotovar");

	  /* Build a label for the new block which will contain the
	     factored computed goto.  */
	  tree factored_label_decl
	    = create_artificial_label (UNKNOWN_LOCATION);
	  gimple *factored_computed_goto_label
	    = gimple_build_label (factored_label_decl);
	  gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);

	  /* Build our new computed goto.  */
	  gimple *factored_computed_goto = gimple_build_goto (var);
	  gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);

	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (!bb_to_omp_idx.is_empty ()
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;

	      gsi = gsi_last_bb (bb);
	      gimple *last = gsi_stmt (gsi);

	      gcc_assert (computed_goto_p (last));

	      /* Copy the original computed goto's destination into VAR.  */
	      gimple *assignment
		= gimple_build_assign (var, gimple_goto_dest (last));
	      gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

	      edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
	      e->goto_locus = gimple_location (last);
	      gsi_remove (&gsi, true);
	    }
	}
      else
	{
	  tree arg = inner ? boolean_true_node : boolean_false_node;
	  gimple *g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
						  1, arg);
	  gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
	  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

	  /* Create predecessor edges of the dispatcher.  */
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (!bb_to_omp_idx.is_empty ()
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;
	      make_edge (bb, *dispatcher, EDGE_ABNORMAL);
	    }
	}
    }

  make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
}

/* Creates outgoing edges for BB.  Returns 1 when it ends with a
   computed goto, returns 2 when it ends with a statement that
   might return to this function via a nonlocal goto, otherwise
   return 0.  Updates *PCUR_REGION with the OMP region this BB is in.  */

static int
make_edges_bb (basic_block bb, struct omp_region **pcur_region, int *pomp_index)
{
  gimple *last = last_stmt (bb);
  bool fallthru = false;
  int ret = 0;

  if (!last)
    return ret;

  switch (gimple_code (last))
    {
    case GIMPLE_GOTO:
      if (make_goto_expr_edges (bb))
	ret = 1;
      fallthru = false;
      break;
    case GIMPLE_RETURN:
      {
	edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
	e->goto_locus = gimple_location (last);
	fallthru = false;
      }
      break;
    case GIMPLE_COND:
      make_cond_expr_edges (bb);
      fallthru = false;
      break;
    case GIMPLE_SWITCH:
      make_gimple_switch_edges (as_a <gswitch *> (last), bb);
      fallthru = false;
      break;
    case GIMPLE_RESX:
      make_eh_edges (last);
      fallthru = false;
      break;
    case GIMPLE_EH_DISPATCH:
      fallthru = make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
      break;

    case GIMPLE_CALL:
      /* If this function receives a nonlocal goto, then we need to
	 make edges from this call site to all the nonlocal goto
	 handlers.  */
      if (stmt_can_make_abnormal_goto (last))
	ret = 2;

      /* If this statement has reachable exception handlers, then
	 create abnormal edges to them.  */
      make_eh_edges (last);

      /* BUILTIN_RETURN is really a return statement.  */
      if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
	{
	  make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
	  fallthru = false;
	}
      /* Some calls are known not to return.  */
      else
	fallthru = !gimple_call_noreturn_p (last);
      break;

    case GIMPLE_ASSIGN:
      /* A GIMPLE_ASSIGN may throw internally and thus be considered
	 control-altering.  */
      if (is_ctrl_altering_stmt (last))
	make_eh_edges (last);
      fallthru = true;
      break;

    case GIMPLE_ASM:
      make_gimple_asm_edges (bb);
      fallthru = true;
      break;

    CASE_GIMPLE_OMP:
      fallthru = omp_make_gimple_edges (bb, pcur_region, pomp_index);
      break;

    case GIMPLE_TRANSACTION:
      {
	gtransaction *txn = as_a <gtransaction *> (last);
	tree label1 = gimple_transaction_label_norm (txn);
	tree label2 = gimple_transaction_label_uninst (txn);

	if (label1)
	  make_edge (bb, label_to_block (cfun, label1), EDGE_FALLTHRU);
	if (label2)
	  make_edge (bb, label_to_block (cfun, label2),
		     EDGE_TM_UNINSTRUMENTED | (label1 ? 0 : EDGE_FALLTHRU));

	tree label3 = gimple_transaction_label_over (txn);
	if (gimple_transaction_subcode (txn)
	    & (GTMA_HAVE_ABORT | GTMA_IS_OUTER))
	  make_edge (bb, label_to_block (cfun, label3), EDGE_TM_ABORT);

	fallthru = false;
      }
      break;

    default:
      gcc_assert (!stmt_ends_bb_p (last));
      fallthru = true;
      break;
    }

  if (fallthru)
    make_edge (bb, bb->next_bb, EDGE_FALLTHRU);

  return ret;
}

/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;
  auto_vec<basic_block> ab_edge_goto;
  auto_vec<basic_block> ab_edge_call;
  int cur_omp_region_idx = 0;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
	     BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
	     EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      int mer;

      if (!bb_to_omp_idx.is_empty ())
	bb_to_omp_idx[bb->index] = cur_omp_region_idx;

      mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      if (mer == 1)
	ab_edge_goto.safe_push (bb);
      else if (mer == 2)
	ab_edge_call.safe_push (bb);

      if (cur_region && bb_to_omp_idx.is_empty ())
	bb_to_omp_idx.safe_grow_cleared (n_basic_blocks_for_fn (cfun), true);
    }

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.
     For non-local gotos and abnormal edges from calls to calls that return
     twice or forced labels, factor the abnormal edges too, by having all
     abnormal edges from the calls go to a common artificial basic block
     with ABNORMAL_DISPATCHER internal call and abnormal edges from that
     basic block to all forced labels and calls returning twice.
     We do this per-OpenMP structured block, because those regions
     are guaranteed to be single entry single exit by the standard,
     so it is not allowed to enter or exit such regions abnormally this way,
     thus all computed gotos, non-local gotos and setjmp/longjmp calls
     must not transfer control across SESE region boundaries.  */
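  /* A sketch of the factoring (hypothetical blocks and names):

       before:   <bb 2>: goto *p_1;      <bb 3>: goto *q_2;

       after:    <bb 2>: gotovar = p_1;  <bb 3>: gotovar = q_2;
		 (fallthru edges into the dispatcher block)
		 <bb D>: L: goto *gotovar;

     so only the dispatcher block needs an edge to every potential
     destination.  */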
  if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
    {
      gimple_stmt_iterator gsi;
      basic_block dispatcher_bb_array[2] = { NULL, NULL };
      basic_block *dispatcher_bbs = dispatcher_bb_array;
      int count = n_basic_blocks_for_fn (cfun);

      if (!bb_to_omp_idx.is_empty ())
	dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);

      FOR_EACH_BB_FN (bb, cfun)
	{
	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
	      tree target;

	      if (!label_stmt)
		break;

	      target = gimple_label_label (label_stmt);

	      /* Make an edge to every label block that has been marked as a
		 potential target for a computed goto or a non-local goto.  */
	      if (FORCED_LABEL (target))
		handle_abnormal_edges (dispatcher_bbs, bb, &ab_edge_goto,
				       true);
	      if (DECL_NONLOCAL (target))
		{
		  handle_abnormal_edges (dispatcher_bbs, bb, &ab_edge_call,
					 false);
		  break;
		}
	    }

	  if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
	    gsi_next_nondebug (&gsi);
	  if (!gsi_end_p (gsi))
	    {
	      /* Make an edge to every setjmp-like call.  */
	      gimple *call_stmt = gsi_stmt (gsi);
	      if (is_gimple_call (call_stmt)
		  && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
		      || gimple_call_builtin_p (call_stmt,
						BUILT_IN_SETJMP_RECEIVER)))
		handle_abnormal_edges (dispatcher_bbs, bb, &ab_edge_call,
				       false);
	    }
	}

      if (!bb_to_omp_idx.is_empty ())
	XDELETE (dispatcher_bbs);
    }

  omp_free_regions ();
}

/* Add SEQ after GSI.  Start new bb after GSI, and create further bbs as
   needed.  Returns true if new bbs were created.
   Note: This is transitional code, and should not be used for new code.  We
   should be able to get rid of this by rewriting all target va-arg
   gimplification hooks to use an interface gimple_build_cond_value as described
   in https://gcc.gnu.org/ml/gcc-patches/2015-02/msg01194.html.  */

bool
gimple_find_sub_bbs (gimple_seq seq, gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  basic_block bb = gimple_bb (stmt);
  basic_block lastbb, afterbb;
  int old_num_bbs = n_basic_blocks_for_fn (cfun);
  edge e;
  lastbb = make_blocks_1 (seq, bb);
  if (old_num_bbs == n_basic_blocks_for_fn (cfun))
    return false;
  e = split_block (bb, stmt);
  /* Move e->dest to come after the new basic blocks.  */
  afterbb = e->dest;
  unlink_block (afterbb);
  link_block (afterbb, lastbb);
  redirect_edge_succ (e, bb->next_bb);
  bb = bb->next_bb;
  while (bb != afterbb)
    {
      struct omp_region *cur_region = NULL;
      profile_count cnt = profile_count::zero ();
      bool all = true;

      int cur_omp_region_idx = 0;
      int mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      gcc_assert (!mer && !cur_region);
      add_bb_to_loop (bb, afterbb->loop_father);

      edge e;
      edge_iterator ei;
      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  if (e->count ().initialized_p ())
	    cnt += e->count ();
	  else
	    all = false;
	}
      tree_guess_outgoing_edge_probabilities (bb);
      if (all || profile_status_for_fn (cfun) == PROFILE_READ)
	bb->count = cnt;

      bb = bb->next_bb;
    }
  return true;
}

/* Find the next available discriminator value for LOCUS.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  */
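/* For example (a sketch): with "if (p) x = 1; else x = 2;" written on a
   single source line, the then- and else-blocks share that line; giving
   them distinct discriminators lets a sample-based profiler attribute
   counts to each block instead of lumping them together.  */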
static int
next_discriminator_for_locus (int line)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.location_line = line;
  item.discriminator = 0;
  slot = discriminator_per_locus->find_slot_with_hash (&item, line, INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->location_line = line;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}

/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line.  */

static bool
same_line_p (location_t locus1, expanded_location *from, location_t locus2)
{
  expanded_location to;

  if (locus1 == locus2)
    return true;

  to = expand_location (locus2);

  if (from->line != to.line)
    return false;
  if (from->file == to.file)
    return true;
  return (from->file != NULL
	  && to.file != NULL
	  && filename_cmp (from->file, to.file) == 0);
}

/* Assign discriminators to each basic block.  */

static void
assign_discriminators (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;
      gimple *last = last_stmt (bb);
      location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;

      if (locus == UNKNOWN_LOCATION)
	continue;

      expanded_location locus_e = expand_location (locus);

      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  gimple *first = first_non_label_stmt (e->dest);
	  gimple *last = last_stmt (e->dest);
	  if ((first && same_line_p (locus, &locus_e,
				     gimple_location (first)))
	      || (last && same_line_p (locus, &locus_e,
				       gimple_location (last))))
	    {
	      if (e->dest->discriminator != 0 && bb->discriminator == 0)
		bb->discriminator
		  = next_discriminator_for_locus (locus_e.line);
	      else
		e->dest->discriminator
		  = next_discriminator_for_locus (locus_e.line);
	    }
	}
    }
}

/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gcond *entry = as_a <gcond *> (last_stmt (bb));
  gimple *then_stmt, *else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (cfun, then_label);
  else_bb = label_to_block (cfun, else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  e->goto_locus = gimple_location (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    e->goto_locus = gimple_location (else_stmt);

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}


/* Called for each element in the hash table (P) as we delete the
   edge to cases hash table.

   Clear all the CASE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  */

bool
edge_to_cases_cleanup (edge const &, tree const &value, void *)
{
  tree t, next;

  for (t = value; t; t = next)
    {
      next = CASE_CHAIN (t);
      CASE_CHAIN (t) = NULL;
    }

  return true;
}

/* Start recording information mapping edges to case labels.  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = new hash_map<edge, tree>;
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}

/* Return nonzero if we are recording information for case labels.  */

static bool
recording_case_labels_p (void)
{
  return (edge_to_cases != NULL);
}

/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */
void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
  delete edge_to_cases;
  edge_to_cases = NULL;
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      if (bb)
	{
	  gimple *stmt = last_stmt (bb);
	  if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	    group_case_labels_stmt (as_a <gswitch *> (stmt));
	}
    }
  BITMAP_FREE (touched_switch_bbs);
}

/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gswitch *t)
{
  tree *slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = edge_to_cases->get (e);
  if (slot)
    return *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (cfun, lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
	 a new chain.  */
      tree &s = edge_to_cases->get_or_insert (this_edge);
      CASE_CHAIN (elt) = s;
      s = elt;
    }

  return *edge_to_cases->get (e);
}

/* Create the edges for a GIMPLE_SWITCH starting at block BB.  */

static void
make_gimple_switch_edges (gswitch *entry, basic_block bb)
{
  size_t i, n;

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      basic_block label_bb = gimple_switch_label_bb (cfun, entry, i);
      make_edge (bb, label_bb, 0);
    }
}

/* Return the basic block holding label DEST.  */

basic_block
label_to_block (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced by an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi =
	gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
      gimple *stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
    return NULL;
  return (*ifun->cfg->x_label_to_block_map)[uid];
}

/* Create edges for a goto statement at block BB.  Returns true
   if abnormal edges should be created.  */

static bool
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple *goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (cfun, dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      gsi_remove (&last, true);
      return false;
    }

  /* A computed GOTO creates abnormal edges.  */
  return true;
}

/* Create edges for an asm statement with labels at block BB.  */

static void
make_gimple_asm_edges (basic_block bb)
{
  gasm *stmt = as_a <gasm *> (last_stmt (bb));
  int i, n = gimple_asm_nlabels (stmt);

  for (i = 0; i < n; ++i)
    {
      tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
      basic_block label_bb = label_to_block (cfun, label);
      make_edge (bb, label_bb, 0);
    }
}

/*---------------------------------------------------------------------------
			       Flowgraph analysis
---------------------------------------------------------------------------*/

/* Cleanup useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after CFG is created, to get rid of the labels that
   are no longer referenced.  After that we do not run it any more, since
   (almost) no new labels should be created.  */

/* A map from basic block index to the leading label of that block.  */
struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
};

/* Given LABEL return the first label in the same basic block.  */

static tree
main_block_label (tree label, label_record *label_for_bb)
{
  basic_block bb = label_to_block (cfun, label);
  tree main_label = label_for_bb[bb->index].label;

  /* label_to_block possibly inserted undefined label into the chain.  */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  label_for_bb[bb->index].used = true;
  return main_label;
}

/* Clean up redundant labels within the exception tree.  */

static void
cleanup_dead_labels_eh (label_record *label_for_bb)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	lab = main_block_label (lp->post_landing_pad, label_for_bb);
	if (lab != lp->post_landing_pad)
	  {
	    EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
	    lp->post_landing_pad = lab;
	    EH_LANDING_PAD_NR (lab) = lp->index;
	  }
      }

  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
	break;

      case ERT_TRY:
	{
	  eh_catch c;
	  for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	    {
	      lab = c->label;
	      if (lab)
		c->label = main_block_label (lab, label_for_bb);
	    }
	}
	break;

      case ERT_ALLOWED_EXCEPTIONS:
	lab = r->u.allowed.label;
	if (lab)
	  r->u.allowed.label = main_block_label (lab, label_for_bb);
	break;
      }
}

/* Cleanup redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Cleanup all useless labels.  */
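/* For instance (a sketch): if a block starts with

       L1:
       L2:
       x_1 = 1;

   every jump to L2 is redirected to the leading label L1, and L2 is then
   deleted in step 3 provided it is artificial and neither forced nor
   non-local.  */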
1521 void
1522 cleanup_dead_labels (void)
1524 basic_block bb;
1525 label_record *label_for_bb = XCNEWVEC (struct label_record,
1526 last_basic_block_for_fn (cfun));
1528 /* Find a suitable label for each block. We use the first user-defined
1529 label if there is one, or otherwise just the first label we see. */
1530 FOR_EACH_BB_FN (bb, cfun)
1532 gimple_stmt_iterator i;
1534 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
1536 tree label;
1537 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));
1539 if (!label_stmt)
1540 break;
1542 label = gimple_label_label (label_stmt);
1544 /* If we have not yet seen a label for the current block,
1545 remember this one and see if there are more labels. */
1546 if (!label_for_bb[bb->index].label)
1548 label_for_bb[bb->index].label = label;
1549 continue;
1552 /* If we did see a label for the current block already, but it
1553 is an artificially created label, replace it if the current
1554 label is a user defined label. */
1555 if (!DECL_ARTIFICIAL (label)
1556 && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
1558 label_for_bb[bb->index].label = label;
1559 break;
1564 /* Now redirect all jumps/branches to the selected label.
1565 First do so for each block ending in a control statement. */
1566 FOR_EACH_BB_FN (bb, cfun)
1568 gimple *stmt = last_stmt (bb);
1569 tree label, new_label;
1571 if (!stmt)
1572 continue;
1574 switch (gimple_code (stmt))
1576 case GIMPLE_COND:
1578 gcond *cond_stmt = as_a <gcond *> (stmt);
1579 label = gimple_cond_true_label (cond_stmt);
1580 if (label)
1582 new_label = main_block_label (label, label_for_bb);
1583 if (new_label != label)
1584 gimple_cond_set_true_label (cond_stmt, new_label);
1587 label = gimple_cond_false_label (cond_stmt);
1588 if (label)
1590 new_label = main_block_label (label, label_for_bb);
1591 if (new_label != label)
1592 gimple_cond_set_false_label (cond_stmt, new_label);
1595 break;
1597 case GIMPLE_SWITCH:
1599 gswitch *switch_stmt = as_a <gswitch *> (stmt);
1600 size_t i, n = gimple_switch_num_labels (switch_stmt);
1602 /* Replace all destination labels. */
1603 for (i = 0; i < n; ++i)
1605 tree case_label = gimple_switch_label (switch_stmt, i);
1606 label = CASE_LABEL (case_label);
1607 new_label = main_block_label (label, label_for_bb);
1608 if (new_label != label)
1609 CASE_LABEL (case_label) = new_label;
1611 break;
1614 case GIMPLE_ASM:
1616 gasm *asm_stmt = as_a <gasm *> (stmt);
1617 int i, n = gimple_asm_nlabels (asm_stmt);
1619 for (i = 0; i < n; ++i)
1621 tree cons = gimple_asm_label_op (asm_stmt, i);
1622 tree label = main_block_label (TREE_VALUE (cons), label_for_bb);
1623 TREE_VALUE (cons) = label;
1625 break;
1628 /* We have to handle gotos until they're removed, and we don't
1629 remove them until after we've created the CFG edges. */
1630 case GIMPLE_GOTO:
1631 if (!computed_goto_p (stmt))
1633 ggoto *goto_stmt = as_a <ggoto *> (stmt);
1634 label = gimple_goto_dest (goto_stmt);
1635 new_label = main_block_label (label, label_for_bb);
1636 if (new_label != label)
1637 gimple_goto_set_dest (goto_stmt, new_label);
1639 break;
1641 case GIMPLE_TRANSACTION:
1643 gtransaction *txn = as_a <gtransaction *> (stmt);
1645 label = gimple_transaction_label_norm (txn);
1646 if (label)
1648 new_label = main_block_label (label, label_for_bb);
1649 if (new_label != label)
1650 gimple_transaction_set_label_norm (txn, new_label);
1653 label = gimple_transaction_label_uninst (txn);
1654 if (label)
1656 new_label = main_block_label (label, label_for_bb);
1657 if (new_label != label)
1658 gimple_transaction_set_label_uninst (txn, new_label);
1661 label = gimple_transaction_label_over (txn);
1662 if (label)
1664 new_label = main_block_label (label, label_for_bb);
1665 if (new_label != label)
1666 gimple_transaction_set_label_over (txn, new_label);
1669 break;
1671 default:
1672 break;
1676 /* Do the same for the exception region tree labels. */
1677 cleanup_dead_labels_eh (label_for_bb);
1679 /* Finally, purge dead labels. All user-defined labels and labels that
1680 can be the target of non-local gotos and labels which have their
1681 address taken are preserved. */
1682 FOR_EACH_BB_FN (bb, cfun)
1684 gimple_stmt_iterator i;
1685 tree label_for_this_bb = label_for_bb[bb->index].label;
1687 if (!label_for_this_bb)
1688 continue;
1690 /* If the main label of the block is unused, we may still remove it. */
1691 if (!label_for_bb[bb->index].used)
1692 label_for_this_bb = NULL;
1694 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
1696 tree label;
1697 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));
1699 if (!label_stmt)
1700 break;
1702 label = gimple_label_label (label_stmt);
1704 if (label == label_for_this_bb
1705 || !DECL_ARTIFICIAL (label)
1706 || DECL_NONLOCAL (label)
1707 || FORCED_LABEL (label))
1708 gsi_next (&i);
1709 else
1711 gcc_checking_assert (EH_LANDING_PAD_NR (label) == 0);
1712 gsi_remove (&i, true);
1717 free (label_for_bb);
1720 /* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
1721 the ones jumping to the same label.
1722 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
1724 bool
1725 group_case_labels_stmt (gswitch *stmt)
1727 int old_size = gimple_switch_num_labels (stmt);
1728 int i, next_index, new_size;
1729 basic_block default_bb = NULL;
1730 hash_set<tree> *removed_labels = NULL;
1732 default_bb = gimple_switch_default_bb (cfun, stmt);
1734 /* Look for possible opportunities to merge cases. */
1735 new_size = i = 1;
1736 while (i < old_size)
1738 tree base_case, base_high;
1739 basic_block base_bb;
1741 base_case = gimple_switch_label (stmt, i);
1743 gcc_assert (base_case);
1744 base_bb = label_to_block (cfun, CASE_LABEL (base_case));
1746 /* Discard cases that have the same destination as the default case or
1747 whose destination blocks have already been removed as unreachable. */
1748 if (base_bb == NULL
1749 || base_bb == default_bb
1750 || (removed_labels
1751 && removed_labels->contains (CASE_LABEL (base_case))))
1753 i++;
1754 continue;
1757 base_high = CASE_HIGH (base_case)
1758 ? CASE_HIGH (base_case)
1759 : CASE_LOW (base_case);
1760 next_index = i + 1;
1762 /* Try to merge case labels. Break out when we reach the end
1763 of the label vector or when we cannot merge the next case
1764 label with the current one. */
1765 while (next_index < old_size)
1767 tree merge_case = gimple_switch_label (stmt, next_index);
1768 basic_block merge_bb = label_to_block (cfun, CASE_LABEL (merge_case));
1769 wide_int bhp1 = wi::to_wide (base_high) + 1;
1771 /* Merge the cases if they jump to the same place,
1772 and their ranges are consecutive. */
1773 if (merge_bb == base_bb
1774 && (removed_labels == NULL
1775 || !removed_labels->contains (CASE_LABEL (merge_case)))
1776 && wi::to_wide (CASE_LOW (merge_case)) == bhp1)
1778 base_high
1779 = (CASE_HIGH (merge_case)
1780 ? CASE_HIGH (merge_case) : CASE_LOW (merge_case));
1781 CASE_HIGH (base_case) = base_high;
1782 next_index++;
1784 else
1785 break;
1788 /* Discard cases that have an unreachable destination block. */
1789 if (EDGE_COUNT (base_bb->succs) == 0
1790 && gimple_seq_unreachable_p (bb_seq (base_bb))
1791 /* Don't optimize this if __builtin_unreachable () is the
1792 implicitly added one by the C++ FE too early, before
1793 -Wreturn-type can be diagnosed. We'll optimize it later
1794 during switchconv pass or any other cfg cleanup. */
1795 && (gimple_in_ssa_p (cfun)
1796 || (LOCATION_LOCUS (gimple_location (last_stmt (base_bb)))
1797 != BUILTINS_LOCATION)))
1799 edge base_edge = find_edge (gimple_bb (stmt), base_bb);
1800 if (base_edge != NULL)
1802 for (gimple_stmt_iterator gsi = gsi_start_bb (base_bb);
1803 !gsi_end_p (gsi); gsi_next (&gsi))
1804 if (glabel *stmt = dyn_cast <glabel *> (gsi_stmt (gsi)))
1806 if (FORCED_LABEL (gimple_label_label (stmt))
1807 || DECL_NONLOCAL (gimple_label_label (stmt)))
1809 /* Forced/non-local labels aren't going to be removed,
1810 but they will be moved to some neighbouring basic
1811 block. If some later case label refers to one of
1812 those labels, we should throw that case away rather
1813 than keeping it around and refering to some random
1814 other basic block without an edge to it. */
1815 if (removed_labels == NULL)
1816 removed_labels = new hash_set<tree>;
1817 removed_labels->add (gimple_label_label (stmt));
1820 else
1821 break;
1822 remove_edge_and_dominated_blocks (base_edge);
1824 i = next_index;
1825 continue;
1828 if (new_size < i)
1829 gimple_switch_set_label (stmt, new_size,
1830 gimple_switch_label (stmt, i));
1831 i = next_index;
1832 new_size++;
1835 gcc_assert (new_size <= old_size);
1837 if (new_size < old_size)
1838 gimple_switch_set_num_labels (stmt, new_size);
1840 delete removed_labels;
1841 return new_size < old_size;
1844 /* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1845 and scan the sorted vector of cases. Combine the ones jumping to the
1846 same label. */
1848 bool
1849 group_case_labels (void)
1851 basic_block bb;
1852 bool changed = false;
1854 FOR_EACH_BB_FN (bb, cfun)
1856 gimple *stmt = last_stmt (bb);
1857 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1858 changed |= group_case_labels_stmt (as_a <gswitch *> (stmt));
1861 return changed;
1864 /* Checks whether we can merge block B into block A. */
1866 static bool
1867 gimple_can_merge_blocks_p (basic_block a, basic_block b)
1869 gimple *stmt;
1871 if (!single_succ_p (a))
1872 return false;
1874 if (single_succ_edge (a)->flags & EDGE_COMPLEX)
1875 return false;
1877 if (single_succ (a) != b)
1878 return false;
1880 if (!single_pred_p (b))
1881 return false;
1883 if (a == ENTRY_BLOCK_PTR_FOR_FN (cfun)
1884 || b == EXIT_BLOCK_PTR_FOR_FN (cfun))
1885 return false;
1887 /* If A ends by a statement causing exceptions or something similar, we
1888 cannot merge the blocks. */
1889 stmt = last_stmt (a);
1890 if (stmt && stmt_ends_bb_p (stmt))
1891 return false;
1893 /* Do not allow a block with only a non-local label to be merged. */
1894 if (stmt)
1895 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
1896 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
1897 return false;
1899 /* Examine the labels at the beginning of B. */
1900 for (gimple_stmt_iterator gsi = gsi_start_bb (b); !gsi_end_p (gsi);
1901 gsi_next (&gsi))
1903 tree lab;
1904 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
1905 if (!label_stmt)
1906 break;
1907 lab = gimple_label_label (label_stmt);
1909 /* Do not remove user forced labels or for -O0 any user labels. */
1910 if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
1911 return false;
1914 /* Protect simple loop latches. We only want to avoid merging
1915 the latch with the loop header or with a block in another
1916 loop in this case. */
1917 if (current_loops
1918 && b->loop_father->latch == b
1919 && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)
1920 && (b->loop_father->header == a
1921 || b->loop_father != a->loop_father))
1922 return false;
1924 /* It must be possible to eliminate all phi nodes in B. If ssa form
1925 is not up-to-date and a name-mapping is registered, we cannot eliminate
1926 any phis. Symbols marked for renaming are never a problem though. */
1927 for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);
1928 gsi_next (&gsi))
1930 gphi *phi = gsi.phi ();
1931 /* Technically only new names matter. */
1932 if (name_registered_for_update_p (PHI_RESULT (phi)))
1933 return false;
1936 /* When not optimizing, don't merge if we'd lose goto_locus. */
1937 if (!optimize
1938 && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
1940 location_t goto_locus = single_succ_edge (a)->goto_locus;
1941 gimple_stmt_iterator prev, next;
1942 prev = gsi_last_nondebug_bb (a);
1943 next = gsi_after_labels (b);
1944 if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
1945 gsi_next_nondebug (&next);
1946 if ((gsi_end_p (prev)
1947 || gimple_location (gsi_stmt (prev)) != goto_locus)
1948 && (gsi_end_p (next)
1949 || gimple_location (gsi_stmt (next)) != goto_locus))
1950 return false;
1953 return true;
1956 /* Replaces all uses of NAME by VAL. */
1958 void
1959 replace_uses_by (tree name, tree val)
1961 imm_use_iterator imm_iter;
1962 use_operand_p use;
1963 gimple *stmt;
1964 edge e;
1966 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
1968 /* Mark the block if we change the last stmt in it. */
1969 if (cfgcleanup_altered_bbs
1970 && stmt_ends_bb_p (stmt))
1971 bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);
1973 FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
1975 replace_exp (use, val);
1977 if (gimple_code (stmt) == GIMPLE_PHI)
1979 e = gimple_phi_arg_edge (as_a <gphi *> (stmt),
1980 PHI_ARG_INDEX_FROM_USE (use));
1981 if (e->flags & EDGE_ABNORMAL
1982 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
1984 /* This can only occur for virtual operands, since
1985 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
1986 would prevent replacement. */
1987 gcc_checking_assert (virtual_operand_p (name));
1988 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
1993 if (gimple_code (stmt) != GIMPLE_PHI)
1995 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1996 gimple *orig_stmt = stmt;
1997 size_t i;
1999 /* FIXME. It shouldn't be required to keep TREE_CONSTANT
2000 on ADDR_EXPRs up-to-date on GIMPLE. Propagation will
2001 only change something from non-invariant to invariant, and only
2002 when propagating constants. */
2003 if (is_gimple_min_invariant (val))
2004 for (i = 0; i < gimple_num_ops (stmt); i++)
2006 tree op = gimple_op (stmt, i);
2007 /* Operands may be empty here. For example, the labels
2008 of a GIMPLE_COND are nulled out following the creation
2009 of the corresponding CFG edges. */
2010 if (op && TREE_CODE (op) == ADDR_EXPR)
2011 recompute_tree_invariant_for_addr_expr (op);
2014 if (fold_stmt (&gsi))
2015 stmt = gsi_stmt (gsi);
2017 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
2018 gimple_purge_dead_eh_edges (gimple_bb (stmt));
2020 update_stmt (stmt);
2024 gcc_checking_assert (has_zero_uses (name));
2026 /* Also update the trees stored in loop structures. */
2027 if (current_loops)
2029 for (auto loop : loops_list (cfun, 0))
2030 substitute_in_loop_info (loop, name, val);
2034 /* Merge block B into block A. */
2036 static void
2037 gimple_merge_blocks (basic_block a, basic_block b)
2039 gimple_stmt_iterator last, gsi;
2040 gphi_iterator psi;
2042 if (dump_file)
2043 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
2045 /* Remove all single-valued PHI nodes from block B of the form
2046 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
2047 gsi = gsi_last_bb (a);
2048 for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
2050 gimple *phi = gsi_stmt (psi);
2051 tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
2052 gimple *copy;
2053 bool may_replace_uses = (virtual_operand_p (def)
2054 || may_propagate_copy (def, use));
2056 /* In case we maintain loop closed ssa form, do not propagate arguments
2057 of loop exit phi nodes. */
2058 if (current_loops
2059 && loops_state_satisfies_p (LOOP_CLOSED_SSA)
2060 && !virtual_operand_p (def)
2061 && TREE_CODE (use) == SSA_NAME
2062 && a->loop_father != b->loop_father)
2063 may_replace_uses = false;
2065 if (!may_replace_uses)
2067 gcc_assert (!virtual_operand_p (def));
2069 /* Note that just emitting the copies is fine -- there is no problem
2070 with ordering of phi nodes. This is because A is the single
2071 predecessor of B, therefore results of the phi nodes cannot
2072 appear as arguments of the phi nodes. */
2073 copy = gimple_build_assign (def, use);
2074 gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
2075 remove_phi_node (&psi, false);
2077 else
2079 /* If we deal with a PHI for virtual operands, we can simply
2080 propagate these without fussing with folding or updating
2081 the stmt. */
2082 if (virtual_operand_p (def))
2084 imm_use_iterator iter;
2085 use_operand_p use_p;
2086 gimple *stmt;
2088 FOR_EACH_IMM_USE_STMT (stmt, iter, def)
2089 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
2090 SET_USE (use_p, use);
2092 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
2093 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
2095 else
2096 replace_uses_by (def, use);
2098 remove_phi_node (&psi, true);
2102 /* Ensure that B follows A. */
2103 move_block_after (b, a);
2105 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
2106 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
2108 /* Remove labels from B and set gimple_bb to A for other statements. */
2109 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
2111 gimple *stmt = gsi_stmt (gsi);
2112 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2114 tree label = gimple_label_label (label_stmt);
2115 int lp_nr;
2117 gsi_remove (&gsi, false);
2119 /* Now that we can thread computed gotos, we might have
2120 a situation where we have a forced label in block B.
2121 However, the label at the start of block B might still be
2122 used in other ways (think about the runtime checking for
2123 Fortran assigned gotos). So we cannot just delete the
2124 label. Instead we move the label to the start of block A. */
2125 if (FORCED_LABEL (label))
2127 gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
2128 tree first_label = NULL_TREE;
2129 if (!gsi_end_p (dest_gsi))
2130 if (glabel *first_label_stmt
2131 = dyn_cast <glabel *> (gsi_stmt (dest_gsi)))
2132 first_label = gimple_label_label (first_label_stmt);
2133 if (first_label
2134 && (DECL_NONLOCAL (first_label)
2135 || EH_LANDING_PAD_NR (first_label) != 0))
2136 gsi_insert_after (&dest_gsi, stmt, GSI_NEW_STMT);
2137 else
2138 gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
2140 /* Other user labels are kept around in the form of a debug stmt. */
2141 else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_BIND_STMTS)
2143 gimple *dbg = gimple_build_debug_bind (label,
2144 integer_zero_node,
2145 stmt);
2146 gimple_debug_bind_reset_value (dbg);
2147 gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
2150 lp_nr = EH_LANDING_PAD_NR (label);
2151 if (lp_nr)
2153 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
2154 lp->post_landing_pad = NULL;
2157 else
2159 gimple_set_bb (stmt, a);
2160 gsi_next (&gsi);
2164 /* When merging two BBs, if their counts are different, the larger count
2165 is selected as the new bb count. This is to handle inconsistent
2166 profiles. */
2167 if (a->loop_father == b->loop_father)
2169 a->count = a->count.merge (b->count);
2172 /* Merge the sequences. */
2173 last = gsi_last_bb (a);
2174 gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
2175 set_bb_seq (b, NULL);
2177 if (cfgcleanup_altered_bbs)
2178 bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
2182 /* Return the one of two successors of BB that is not reachable by a
2183 complex edge, if there is one. Else, return BB. We use
2184 this in optimizations that use post-dominators for their heuristics,
2185 to catch the cases in C++ where function calls are involved. */
2187 basic_block
2188 single_noncomplex_succ (basic_block bb)
2190 edge e0, e1;
2191 if (EDGE_COUNT (bb->succs) != 2)
2192 return bb;
2194 e0 = EDGE_SUCC (bb, 0);
2195 e1 = EDGE_SUCC (bb, 1);
2196 if (e0->flags & EDGE_COMPLEX)
2197 return e1->dest;
2198 if (e1->flags & EDGE_COMPLEX)
2199 return e0->dest;
2201 return bb;
2204 /* CALL is a GIMPLE_CALL. Set current_function_calls_* flags. */
2206 void
2207 notice_special_calls (gcall *call)
2209 int flags = gimple_call_flags (call);
2211 if (flags & ECF_MAY_BE_ALLOCA)
2212 cfun->calls_alloca = true;
2213 if (flags & ECF_RETURNS_TWICE)
2214 cfun->calls_setjmp = true;
2218 /* Clear flags set by notice_special_calls. Used by dead code removal
2219 to update the flags. */
2221 void
2222 clear_special_calls (void)
2224 cfun->calls_alloca = false;
2225 cfun->calls_setjmp = false;
2228 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2230 static void
2231 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2233 /* Since this block is no longer reachable, we can just delete all
2234 of its PHI nodes. */
2235 remove_phi_nodes (bb);
2237 /* Remove edges to BB's successors. */
2238 while (EDGE_COUNT (bb->succs) > 0)
2239 remove_edge (EDGE_SUCC (bb, 0));
2243 /* Remove statements of basic block BB. */
2245 static void
2246 remove_bb (basic_block bb)
2248 gimple_stmt_iterator i;
2250 if (dump_file)
2252 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2253 if (dump_flags & TDF_DETAILS)
2255 dump_bb (dump_file, bb, 0, TDF_BLOCKS);
2256 fprintf (dump_file, "\n");
2260 if (current_loops)
2262 class loop *loop = bb->loop_father;
2264 /* If a loop gets removed, clean up the information associated
2265 with it. */
2266 if (loop->latch == bb
2267 || loop->header == bb)
2268 free_numbers_of_iterations_estimates (loop);
2271 /* Remove all the instructions in the block. */
2272 if (bb_seq (bb) != NULL)
2274 /* Walk backwards so as to get a chance to substitute all
2275 released DEFs into debug stmts. See
2276 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
2277 details. */
2278 for (i = gsi_last_bb (bb); !gsi_end_p (i);)
2280 gimple *stmt = gsi_stmt (i);
2281 glabel *label_stmt = dyn_cast <glabel *> (stmt);
2282 if (label_stmt
2283 && (FORCED_LABEL (gimple_label_label (label_stmt))
2284 || DECL_NONLOCAL (gimple_label_label (label_stmt))))
2286 basic_block new_bb;
2287 gimple_stmt_iterator new_gsi;
2289 /* A non-reachable non-local label may still be referenced.
2290 But it no longer needs to carry the extra semantics of
2291 non-locality. */
2292 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
2294 DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
2295 FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
2298 new_bb = bb->prev_bb;
2299 /* Don't move any labels into ENTRY block. */
2300 if (new_bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
2302 new_bb = single_succ (new_bb);
2303 gcc_assert (new_bb != bb);
2305 if ((unsigned) bb->index < bb_to_omp_idx.length ()
2306 && ((unsigned) new_bb->index >= bb_to_omp_idx.length ()
2307 || (bb_to_omp_idx[bb->index]
2308 != bb_to_omp_idx[new_bb->index])))
2310 /* During cfg pass make sure to put orphaned labels
2311 into the right OMP region. */
2312 unsigned int i;
2313 int idx;
2314 new_bb = NULL;
2315 FOR_EACH_VEC_ELT (bb_to_omp_idx, i, idx)
2316 if (i >= NUM_FIXED_BLOCKS
2317 && idx == bb_to_omp_idx[bb->index]
2318 && i != (unsigned) bb->index)
2320 new_bb = BASIC_BLOCK_FOR_FN (cfun, i);
2321 break;
2323 if (new_bb == NULL)
2325 new_bb = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2326 gcc_assert (new_bb != bb);
2329 new_gsi = gsi_after_labels (new_bb);
2330 gsi_remove (&i, false);
2331 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
2333 else
2335 /* Release SSA definitions. */
2336 release_defs (stmt);
2337 gsi_remove (&i, true);
2340 if (gsi_end_p (i))
2341 i = gsi_last_bb (bb);
2342 else
2343 gsi_prev (&i);
2347 if ((unsigned) bb->index < bb_to_omp_idx.length ())
2348 bb_to_omp_idx[bb->index] = -1;
2349 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2350 bb->il.gimple.seq = NULL;
2351 bb->il.gimple.phi_nodes = NULL;
2355 /* Given a basic block BB and a value VAL for use in the final statement
2356 of the block (if a GIMPLE_COND, GIMPLE_SWITCH, or computed goto), return
2357 the edge that will be taken out of the block.
2358 If VAL is NULL_TREE, then the current value of the final statement's
2359 predicate or index is used.
2360 If the value does not match a unique edge, NULL is returned. */
2362 edge
2363 find_taken_edge (basic_block bb, tree val)
2365 gimple *stmt;
2367 stmt = last_stmt (bb);
2369 /* Handle ENTRY and EXIT. */
2370 if (!stmt)
2371 return NULL;
2373 if (gimple_code (stmt) == GIMPLE_COND)
2374 return find_taken_edge_cond_expr (as_a <gcond *> (stmt), val);
2376 if (gimple_code (stmt) == GIMPLE_SWITCH)
2377 return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), val);
2379 if (computed_goto_p (stmt))
2381 /* Only optimize if the argument is a label; if the argument is
2382 not a label then we cannot construct a proper CFG.
2384 It may be the case that we only need to allow the LABEL_REF to
2385 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2386 appear inside a LABEL_EXPR just to be safe. */
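/* For instance, with the labels-as-values extension, a computed goto
   such as "goto *&&lab;" can be resolved once VAL is the ADDR_EXPR
   &lab whose operand is the LABEL_DECL for lab (the name "lab" is
   arbitrary). */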
2387 if (val
2388 && (TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2389 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2390 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2393 /* Otherwise we only know the taken successor edge if it's unique. */
2394 return single_succ_p (bb) ? single_succ_edge (bb) : NULL;
2397 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2398 statement, determine which of the outgoing edges will be taken out of the
2399 block. Return NULL if either edge may be taken. */
2401 static edge
2402 find_taken_edge_computed_goto (basic_block bb, tree val)
2404 basic_block dest;
2405 edge e = NULL;
2407 dest = label_to_block (cfun, val);
2408 if (dest)
2409 e = find_edge (bb, dest);
2411 /* It's possible for find_edge to return NULL here on invalid code
2412 that abuses the labels-as-values extension (e.g. code that attempts to
2413 jump *between* functions via stored labels-as-values; PR 84136).
2414 If so, then we simply return that NULL for the edge.
2415 We don't currently have a way of detecting such invalid code, so we
2416 can't assert that it was the case when a NULL edge occurs here. */
2418 return e;
2421 /* Given COND_STMT and a constant value VAL for use as the predicate,
2422 determine which of the two edges will be taken out of
2423 the statement's block. Return NULL if either edge may be taken.
2424 If VAL is NULL_TREE, then the current value of COND_STMT's predicate
2425 is used. */
2427 static edge
2428 find_taken_edge_cond_expr (const gcond *cond_stmt, tree val)
2430 edge true_edge, false_edge;
2432 if (val == NULL_TREE)
2434 /* Use the current value of the predicate. */
2435 if (gimple_cond_true_p (cond_stmt))
2436 val = integer_one_node;
2437 else if (gimple_cond_false_p (cond_stmt))
2438 val = integer_zero_node;
2439 else
2440 return NULL;
2442 else if (TREE_CODE (val) != INTEGER_CST)
2443 return NULL;
2445 extract_true_false_edges_from_block (gimple_bb (cond_stmt),
2446 &true_edge, &false_edge);
2448 return (integer_zerop (val) ? false_edge : true_edge);
2451 /* Given SWITCH_STMT and an INTEGER_CST VAL for use as the index, determine
2452 which edge will be taken out of the statement's block. Return NULL if any
2453 edge may be taken.
2454 If VAL is NULL_TREE, then the current value of SWITCH_STMT's index
2455 is used. */
2457 edge
2458 find_taken_edge_switch_expr (const gswitch *switch_stmt, tree val)
2460 basic_block dest_bb;
2461 edge e;
2462 tree taken_case;
2464 if (gimple_switch_num_labels (switch_stmt) == 1)
2465 taken_case = gimple_switch_default_label (switch_stmt);
2466 else
2468 if (val == NULL_TREE)
2469 val = gimple_switch_index (switch_stmt);
2470 if (TREE_CODE (val) != INTEGER_CST)
2471 return NULL;
2472 else
2473 taken_case = find_case_label_for_value (switch_stmt, val);
2475 dest_bb = label_to_block (cfun, CASE_LABEL (taken_case));
2477 e = find_edge (gimple_bb (switch_stmt), dest_bb);
2478 gcc_assert (e);
2479 return e;
2483 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2484 We can make optimal use here of the fact that the case labels are
2485 sorted: We can do a binary search for a case matching VAL. */
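/* As an illustration with arbitrary values: for sorted labels
   { default, case 1, case 5, case 9 } (n = 4) and VAL = 5, the first
   probe is index (0 + 4) / 2 = 2, whose CASE_LOW compares equal, so
   that single-valued label is returned; VAL = 7 matches no label and
   falls back to the default. */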
2487 tree
2488 find_case_label_for_value (const gswitch *switch_stmt, tree val)
2490 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2491 tree default_case = gimple_switch_default_label (switch_stmt);
2493 for (low = 0, high = n; high - low > 1; )
2495 size_t i = (high + low) / 2;
2496 tree t = gimple_switch_label (switch_stmt, i);
2497 int cmp;
2499 /* Cache the result of comparing CASE_LOW and val. */
2500 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2502 if (cmp > 0)
2503 high = i;
2504 else
2505 low = i;
2507 if (CASE_HIGH (t) == NULL)
2509 /* A single-valued case label. */
2510 if (cmp == 0)
2511 return t;
2513 else
2515 /* A case range. We can only handle integer ranges. */
2516 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2517 return t;
2521 return default_case;
2525 /* Dump a basic block on stderr. */
2527 void
2528 gimple_debug_bb (basic_block bb)
2530 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2534 /* Dump basic block with index N on stderr. */
2536 basic_block
2537 gimple_debug_bb_n (int n)
2539 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2540 return BASIC_BLOCK_FOR_FN (cfun, n);
2544 /* Dump the CFG on stderr.
2546 FLAGS are the same as those used by the tree dumping functions
2547 (see TDF_* in dumpfile.h). */
2549 void
2550 gimple_debug_cfg (dump_flags_t flags)
2552 gimple_dump_cfg (stderr, flags);
2556 /* Dump the program showing basic block boundaries on the given FILE.
2558 FLAGS are the same as those used by the tree dumping functions (see TDF_* in
2559 tree.h). */
2561 void
2562 gimple_dump_cfg (FILE *file, dump_flags_t flags)
2564 if (flags & TDF_DETAILS)
2566 dump_function_header (file, current_function_decl, flags);
2567 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2568 n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2569 last_basic_block_for_fn (cfun));
2571 brief_dump_cfg (file, flags);
2572 fprintf (file, "\n");
2575 if (flags & TDF_STATS)
2576 dump_cfg_stats (file);
2578 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2582 /* Dump CFG statistics on FILE. */
2584 void
2585 dump_cfg_stats (FILE *file)
2587 static long max_num_merged_labels = 0;
2588 unsigned long size, total = 0;
2589 long num_edges;
2590 basic_block bb;
2591 const char * const fmt_str = "%-30s%-13s%12s\n";
2592 const char * const fmt_str_1 = "%-30s%13d" PRsa (11) "\n";
2593 const char * const fmt_str_2 = "%-30s%13ld" PRsa (11) "\n";
2594 const char * const fmt_str_3 = "%-43s" PRsa (11) "\n";
2595 const char *funcname = current_function_name ();
2597 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2599 fprintf (file, "---------------------------------------------------------\n");
2600 fprintf (file, fmt_str, "", " Number of ", "Memory");
2601 fprintf (file, fmt_str, "", " instances ", "used ");
2602 fprintf (file, "---------------------------------------------------------\n");
2604 size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2605 total += size;
2606 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2607 SIZE_AMOUNT (size));
2609 num_edges = 0;
2610 FOR_EACH_BB_FN (bb, cfun)
2611 num_edges += EDGE_COUNT (bb->succs);
2612 size = num_edges * sizeof (class edge_def);
2613 total += size;
2614 fprintf (file, fmt_str_2, "Edges", num_edges, SIZE_AMOUNT (size));
2616 fprintf (file, "---------------------------------------------------------\n");
2617 fprintf (file, fmt_str_3, "Total memory used by CFG data",
2618 SIZE_AMOUNT (total));
2619 fprintf (file, "---------------------------------------------------------\n");
2620 fprintf (file, "\n");
2622 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2623 max_num_merged_labels = cfg_stats.num_merged_labels;
2625 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2626 cfg_stats.num_merged_labels, max_num_merged_labels);
2628 fprintf (file, "\n");
2632 /* Dump CFG statistics on stderr. Keep extern so that it's always
2633 linked in the final executable. */
2635 DEBUG_FUNCTION void
2636 debug_cfg_stats (void)
2638 dump_cfg_stats (stderr);
2641 /*---------------------------------------------------------------------------
2642 Miscellaneous helpers
2643 ---------------------------------------------------------------------------*/
2645 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2646 flow. Transfers of control flow associated with EH are excluded. */
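/* Typical sources of such abnormal flow are returns-twice functions
   (a longjmp back to a setjmp in this function) and calls to nested
   functions that can jump to one of our non-local labels. */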
2648 static bool
2649 call_can_make_abnormal_goto (gimple *t)
2651 /* If the function has no non-local labels, then a call cannot make an
2652 abnormal transfer of control. */
2653 if (!cfun->has_nonlocal_label
2654 && !cfun->calls_setjmp)
2655 return false;
2657 /* Likewise if the call has no side effects. */
2658 if (!gimple_has_side_effects (t))
2659 return false;
2661 /* Likewise if the called function is leaf. */
2662 if (gimple_call_flags (t) & ECF_LEAF)
2663 return false;
2665 return true;
2669 /* Return true if T can make an abnormal transfer of control flow.
2670 Transfers of control flow associated with EH are excluded. */
2672 bool
2673 stmt_can_make_abnormal_goto (gimple *t)
2675 if (computed_goto_p (t))
2676 return true;
2677 if (is_gimple_call (t))
2678 return call_can_make_abnormal_goto (t);
2679 return false;
2683 /* Return true if T represents a stmt that always transfers control. */
2685 bool
2686 is_ctrl_stmt (gimple *t)
2688 switch (gimple_code (t))
2690 case GIMPLE_COND:
2691 case GIMPLE_SWITCH:
2692 case GIMPLE_GOTO:
2693 case GIMPLE_RETURN:
2694 case GIMPLE_RESX:
2695 return true;
2696 default:
2697 return false;
2702 /* Return true if T is a statement that may alter the flow of control
2703 (e.g., a call to a non-returning function). */
2705 bool
2706 is_ctrl_altering_stmt (gimple *t)
2708 gcc_assert (t);
2710 switch (gimple_code (t))
2712 case GIMPLE_CALL:
2713 /* Per stmt call flag indicates whether the call could alter
2714 control flow. */
2715 if (gimple_call_ctrl_altering_p (t))
2716 return true;
2717 break;
2719 case GIMPLE_EH_DISPATCH:
2720 /* EH_DISPATCH branches to the individual catch handlers at
2721 this level of a try or allowed-exceptions region. It can
2722 fallthru to the next statement as well. */
2723 return true;
2725 case GIMPLE_ASM:
2726 if (gimple_asm_nlabels (as_a <gasm *> (t)) > 0)
2727 return true;
2728 break;
2730 CASE_GIMPLE_OMP:
2731 /* OpenMP directives alter control flow. */
2732 return true;
2734 case GIMPLE_TRANSACTION:
2735 /* A transaction start alters control flow. */
2736 return true;
2738 default:
2739 break;
2742 /* If a statement can throw, it alters control flow. */
2743 return stmt_can_throw_internal (cfun, t);
2747 /* Return true if T is a simple local goto. */
2749 bool
2750 simple_goto_p (gimple *t)
2752 return (gimple_code (t) == GIMPLE_GOTO
2753 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2757 /* Return true if STMT should start a new basic block. PREV_STMT is
2758 the statement preceding STMT. It is used when STMT is a label or a
2759 case label. Labels should only start a new basic block if their
2760 previous statement wasn't a label. Otherwise, a sequence of labels
2761 would generate unnecessary basic blocks that only contain a single
2762 label. */
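/* For example (names arbitrary), in a run of labels "L1: L2: x = y;"
   only L1 opens a new block; L2 is merged into it and counted in
   cfg_stats.num_merged_labels, provided L2 is neither non-local nor
   forced and L1 is an artificial, local label. */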
2764 static inline bool
2765 stmt_starts_bb_p (gimple *stmt, gimple *prev_stmt)
2767 if (stmt == NULL)
2768 return false;
2770 /* PREV_STMT is only set to a debug stmt if the debug stmt is before
2771 any nondebug stmts in the block. We don't want to start another
2772 block in this case: the debug stmt will already have started the
2773 one STMT would start if we weren't outputting debug stmts. */
2774 if (prev_stmt && is_gimple_debug (prev_stmt))
2775 return false;
2777 /* Labels start a new basic block only if the preceding statement
2778 wasn't a label of the same type. This prevents the creation of
2779 consecutive blocks that have nothing but a single label. */
2780 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2782 /* Nonlocal and computed GOTO targets always start a new block. */
2783 if (DECL_NONLOCAL (gimple_label_label (label_stmt))
2784 || FORCED_LABEL (gimple_label_label (label_stmt)))
2785 return true;
2787 if (glabel *plabel = safe_dyn_cast <glabel *> (prev_stmt))
2789 if (DECL_NONLOCAL (gimple_label_label (plabel))
2790 || !DECL_ARTIFICIAL (gimple_label_label (plabel)))
2791 return true;
2793 cfg_stats.num_merged_labels++;
2794 return false;
2796 else
2797 return true;
2799 else if (gimple_code (stmt) == GIMPLE_CALL)
2801 if (gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2802 /* setjmp acts like a nonlocal GOTO target and thus should
2803 start a new block. */
2804 return true;
2805 if (gimple_call_internal_p (stmt, IFN_PHI)
2806 && prev_stmt
2807 && gimple_code (prev_stmt) != GIMPLE_LABEL
2808 && (gimple_code (prev_stmt) != GIMPLE_CALL
2809 || ! gimple_call_internal_p (prev_stmt, IFN_PHI)))
2810 /* PHI nodes start a new block unless preceded by a label
2811 or another PHI. */
2812 return true;
2815 return false;
2819 /* Return true if T should end a basic block. */
2821 bool
2822 stmt_ends_bb_p (gimple *t)
2824 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2827 /* Remove block annotations and other data structures. */
2829 void
2830 delete_tree_cfg_annotations (struct function *fn)
2832 vec_free (label_to_block_map_for_fn (fn));
2835 /* Return the virtual phi in BB. */
2837 gphi *
2838 get_virtual_phi (basic_block bb)
2840 for (gphi_iterator gsi = gsi_start_phis (bb);
2841 !gsi_end_p (gsi);
2842 gsi_next (&gsi))
2844 gphi *phi = gsi.phi ();
2846 if (virtual_operand_p (PHI_RESULT (phi)))
2847 return phi;
2850 return NULL;
2853 /* Return the first statement in basic block BB. */
2855 gimple *
2856 first_stmt (basic_block bb)
2858 gimple_stmt_iterator i = gsi_start_bb (bb);
2859 gimple *stmt = NULL;
2861 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2863 gsi_next (&i);
2864 stmt = NULL;
2866 return stmt;
2869 /* Return the first non-label statement in basic block BB. */
2871 static gimple *
2872 first_non_label_stmt (basic_block bb)
2874 gimple_stmt_iterator i = gsi_start_bb (bb);
2875 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2876 gsi_next (&i);
2877 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2880 /* Return the last statement in basic block BB. */
2882 gimple *
2883 last_stmt (basic_block bb)
2885 gimple_stmt_iterator i = gsi_last_bb (bb);
2886 gimple *stmt = NULL;
2888 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2890 gsi_prev (&i);
2891 stmt = NULL;
2893 return stmt;
2896 /* Return the last statement of an otherwise empty block. Return NULL
2897 if the block is totally empty, or if it contains more than one
2898 statement. */
2900 gimple *
2901 last_and_only_stmt (basic_block bb)
2903 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2904 gimple *last, *prev;
2906 if (gsi_end_p (i))
2907 return NULL;
2909 last = gsi_stmt (i);
2910 gsi_prev_nondebug (&i);
2911 if (gsi_end_p (i))
2912 return last;
2914 /* Empty statements should no longer appear in the instruction stream.
2915 Everything that might have appeared before should be deleted by
2916 remove_useless_stmts, and the optimizers should just gsi_remove
2917 instead of smashing with build_empty_stmt.
2919 Thus the only thing that should appear here in a block containing
2920 one executable statement is a label. */
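/* E.g. a block containing just "L1: x_1 = y_2;" yields the
   assignment, whereas a block with two executable statements
   yields NULL. */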
2921 prev = gsi_stmt (i);
2922 if (gimple_code (prev) == GIMPLE_LABEL)
2923 return last;
2924 else
2925 return NULL;
2928 /* Returns the basic block after which the new basic block created
2929 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2930 near its "logical" location. This is of most help to humans looking
2931 at debugging dumps. */
2933 basic_block
2934 split_edge_bb_loc (edge edge_in)
2936 basic_block dest = edge_in->dest;
2937 basic_block dest_prev = dest->prev_bb;
2939 if (dest_prev)
2941 edge e = find_edge (dest_prev, dest);
2942 if (e && !(e->flags & EDGE_COMPLEX))
2943 return edge_in->src;
2945 return dest_prev;
2948 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2949 Abort on abnormal edges. */
2951 static basic_block
2952 gimple_split_edge (edge edge_in)
2954 basic_block new_bb, after_bb, dest;
2955 edge new_edge, e;
2957 /* Abnormal edges cannot be split. */
2958 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2960 dest = edge_in->dest;
2962 after_bb = split_edge_bb_loc (edge_in);
2964 new_bb = create_empty_bb (after_bb);
2965 new_bb->count = edge_in->count ();
2967 /* We want to avoid re-allocating PHIs when we first
2968 add the fallthru edge from new_bb to dest but we also
2969 want to avoid changing PHI argument order when
2970 first redirecting edge_in away from dest. The former
2971 avoids changing PHI argument order by adding them
2972 last and then the redirection swapping it back into
2973 place by means of unordered remove.
2974 So hack around things by temporarily removing all PHIs
2975 from the destination during the edge redirection and then
2976 making sure the edges stay in order. */
2977 gimple_seq saved_phis = phi_nodes (dest);
2978 unsigned old_dest_idx = edge_in->dest_idx;
2979 set_phi_nodes (dest, NULL);
2980 new_edge = make_single_succ_edge (new_bb, dest, EDGE_FALLTHRU);
2981 e = redirect_edge_and_branch (edge_in, new_bb);
2982 gcc_assert (e == edge_in && new_edge->dest_idx == old_dest_idx);
2983 /* set_phi_nodes sets the BB of the PHI nodes, so do it manually here. */
2984 dest->il.gimple.phi_nodes = saved_phis;
2986 return new_bb;
2990 /* Verify properties of the address expression T whose base should be
2991 TREE_ADDRESSABLE if VERIFY_ADDRESSABLE is true. */
2993 static bool
2994 verify_address (tree t, bool verify_addressable)
2996 bool old_constant;
2997 bool old_side_effects;
2998 bool new_constant;
2999 bool new_side_effects;
3001 old_constant = TREE_CONSTANT (t);
3002 old_side_effects = TREE_SIDE_EFFECTS (t);
3004 recompute_tree_invariant_for_addr_expr (t);
3005 new_side_effects = TREE_SIDE_EFFECTS (t);
3006 new_constant = TREE_CONSTANT (t);
3008 if (old_constant != new_constant)
3010 error ("constant not recomputed when %<ADDR_EXPR%> changed");
3011 return true;
3013 if (old_side_effects != new_side_effects)
3015 error ("side effects not recomputed when %<ADDR_EXPR%> changed");
3016 return true;
3019 tree base = TREE_OPERAND (t, 0);
3020 while (handled_component_p (base))
3021 base = TREE_OPERAND (base, 0);
3023 if (!(VAR_P (base)
3024 || TREE_CODE (base) == PARM_DECL
3025 || TREE_CODE (base) == RESULT_DECL))
3026 return false;
3028 if (verify_addressable && !TREE_ADDRESSABLE (base))
3030 error ("address taken but %<TREE_ADDRESSABLE%> bit not set");
3031 return true;
3034 return false;
3038 /* Verify if EXPR is a valid GIMPLE reference expression. If
3039 REQUIRE_LVALUE is true, verify that it is an lvalue. Returns true
3040 if there is an error, otherwise false. */
3042 static bool
3043 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
3045 const char *code_name = get_tree_code_name (TREE_CODE (expr));
3047 if (TREE_CODE (expr) == REALPART_EXPR
3048 || TREE_CODE (expr) == IMAGPART_EXPR
3049 || TREE_CODE (expr) == BIT_FIELD_REF)
3051 tree op = TREE_OPERAND (expr, 0);
3052 if (!is_gimple_reg_type (TREE_TYPE (expr)))
3054 error ("non-scalar %qs", code_name);
3055 return true;
3058 if (TREE_CODE (expr) == BIT_FIELD_REF)
3060 tree t1 = TREE_OPERAND (expr, 1);
3061 tree t2 = TREE_OPERAND (expr, 2);
3062 poly_uint64 size, bitpos;
3063 if (!poly_int_tree_p (t1, &size)
3064 || !poly_int_tree_p (t2, &bitpos)
3065 || !types_compatible_p (bitsizetype, TREE_TYPE (t1))
3066 || !types_compatible_p (bitsizetype, TREE_TYPE (t2)))
3068 error ("invalid position or size operand to %qs", code_name);
3069 return true;
3071 if (INTEGRAL_TYPE_P (TREE_TYPE (expr))
3072 && maybe_ne (TYPE_PRECISION (TREE_TYPE (expr)), size))
3074 error ("integral result type precision does not match "
3075 "field size of %qs", code_name);
3076 return true;
3078 else if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
3079 && TYPE_MODE (TREE_TYPE (expr)) != BLKmode
3080 && maybe_ne (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (expr))),
3081 size))
3083 error ("mode size of non-integral result does not "
3084 "match field size of %qs",
3085 code_name);
3086 return true;
3088 if (INTEGRAL_TYPE_P (TREE_TYPE (op))
3089 && !type_has_mode_precision_p (TREE_TYPE (op)))
3091 error ("%qs of non-mode-precision operand", code_name);
3092 return true;
3094 if (!AGGREGATE_TYPE_P (TREE_TYPE (op))
3095 && maybe_gt (size + bitpos,
3096 tree_to_poly_uint64 (TYPE_SIZE (TREE_TYPE (op)))))
3098 error ("position plus size exceeds size of referenced object in "
3099 "%qs", code_name);
3100 return true;
3104 if ((TREE_CODE (expr) == REALPART_EXPR
3105 || TREE_CODE (expr) == IMAGPART_EXPR)
3106 && !useless_type_conversion_p (TREE_TYPE (expr),
3107 TREE_TYPE (TREE_TYPE (op))))
3109 error ("type mismatch in %qs reference", code_name);
3110 debug_generic_stmt (TREE_TYPE (expr));
3111 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3112 return true;
3114 expr = op;
3117 while (handled_component_p (expr))
3119 code_name = get_tree_code_name (TREE_CODE (expr));
3121 if (TREE_CODE (expr) == REALPART_EXPR
3122 || TREE_CODE (expr) == IMAGPART_EXPR
3123 || TREE_CODE (expr) == BIT_FIELD_REF)
3125 error ("non-top-level %qs", code_name);
3126 return true;
3129 tree op = TREE_OPERAND (expr, 0);
3131 if (TREE_CODE (expr) == ARRAY_REF
3132 || TREE_CODE (expr) == ARRAY_RANGE_REF)
3134 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3135 || (TREE_OPERAND (expr, 2)
3136 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3137 || (TREE_OPERAND (expr, 3)
3138 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3140 error ("invalid operands to %qs", code_name);
3141 debug_generic_stmt (expr);
3142 return true;
3146 /* Verify if the reference array element types are compatible. */
3147 if (TREE_CODE (expr) == ARRAY_REF
3148 && !useless_type_conversion_p (TREE_TYPE (expr),
3149 TREE_TYPE (TREE_TYPE (op))))
3151 error ("type mismatch in %qs", code_name);
3152 debug_generic_stmt (TREE_TYPE (expr));
3153 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3154 return true;
3156 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3157 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3158 TREE_TYPE (TREE_TYPE (op))))
3160 error ("type mismatch in %qs", code_name);
3161 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3162 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3163 return true;
3166 if (TREE_CODE (expr) == COMPONENT_REF)
3168 if (TREE_OPERAND (expr, 2)
3169 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3171 error ("invalid %qs offset operator", code_name);
3172 return true;
3174 if (!useless_type_conversion_p (TREE_TYPE (expr),
3175 TREE_TYPE (TREE_OPERAND (expr, 1))))
3177 error ("type mismatch in %qs", code_name);
3178 debug_generic_stmt (TREE_TYPE (expr));
3179 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3180 return true;
3184 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3186 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3187 that their operand is not an SSA name or an invariant when
3188 requiring an lvalue (this usually means there is a SRA or IPA-SRA
3189 bug). Otherwise there is nothing to verify; gross mismatches at
3190 most invoke undefined behavior. */
3191 if (require_lvalue
3192 && (TREE_CODE (op) == SSA_NAME
3193 || is_gimple_min_invariant (op)))
3195 error ("conversion of %qs on the left hand side of %qs",
3196 get_tree_code_name (TREE_CODE (op)), code_name);
3197 debug_generic_stmt (expr);
3198 return true;
3200 else if (TREE_CODE (op) == SSA_NAME
3201 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3203 error ("conversion of register to a different size in %qs",
3204 code_name);
3205 debug_generic_stmt (expr);
3206 return true;
3208 else if (!handled_component_p (op))
3209 return false;
3212 expr = op;
3215 code_name = get_tree_code_name (TREE_CODE (expr));
3217 if (TREE_CODE (expr) == MEM_REF)
3219 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0))
3220 || (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
3221 && verify_address (TREE_OPERAND (expr, 0), false)))
3223 error ("invalid address operand in %qs", code_name);
3224 debug_generic_stmt (expr);
3225 return true;
3227 if (!poly_int_tree_p (TREE_OPERAND (expr, 1))
3228 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3230 error ("invalid offset operand in %qs", code_name);
3231 debug_generic_stmt (expr);
3232 return true;
3234 if (MR_DEPENDENCE_CLIQUE (expr) != 0
3235 && MR_DEPENDENCE_CLIQUE (expr) > cfun->last_clique)
3237 error ("invalid clique in %qs", code_name);
3238 debug_generic_stmt (expr);
3239 return true;
3242 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3244 if (!TMR_BASE (expr)
3245 || !is_gimple_mem_ref_addr (TMR_BASE (expr))
3246 || (TREE_CODE (TMR_BASE (expr)) == ADDR_EXPR
3247 && verify_address (TMR_BASE (expr), false)))
3249 error ("invalid address operand in %qs", code_name);
3250 return true;
3252 if (!TMR_OFFSET (expr)
3253 || !poly_int_tree_p (TMR_OFFSET (expr))
3254 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3256 error ("invalid offset operand in %qs", code_name);
3257 debug_generic_stmt (expr);
3258 return true;
3260 if (MR_DEPENDENCE_CLIQUE (expr) != 0
3261 && MR_DEPENDENCE_CLIQUE (expr) > cfun->last_clique)
3263 error ("invalid clique in %qs", code_name);
3264 debug_generic_stmt (expr);
3265 return true;
3268 else if (TREE_CODE (expr) == INDIRECT_REF)
3270 error ("%qs in gimple IL", code_name);
3271 debug_generic_stmt (expr);
3272 return true;
3275 if (!require_lvalue
3276 && (TREE_CODE (expr) == SSA_NAME || is_gimple_min_invariant (expr)))
3277 return false;
3279 if (TREE_CODE (expr) != SSA_NAME && is_gimple_id (expr))
3280 return false;
3282 if (TREE_CODE (expr) != TARGET_MEM_REF
3283 && TREE_CODE (expr) != MEM_REF)
3285 error ("invalid expression for min lvalue");
3286 return true;
3289 return false;
3292 /* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
3293 list of pointer-to types that is trivially convertible to DEST. */
3295 static bool
3296 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3298 tree src;
3300 if (!TYPE_POINTER_TO (src_obj))
3301 return true;
3303 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3304 if (useless_type_conversion_p (dest, src))
3305 return true;
3307 return false;
3310 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3311 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3313 static bool
3314 valid_fixed_convert_types_p (tree type1, tree type2)
3316 return (FIXED_POINT_TYPE_P (type1)
3317 && (INTEGRAL_TYPE_P (type2)
3318 || SCALAR_FLOAT_TYPE_P (type2)
3319 || FIXED_POINT_TYPE_P (type2)));
3322 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3323 is a problem, otherwise false. */
3325 static bool
3326 verify_gimple_call (gcall *stmt)
3328 tree fn = gimple_call_fn (stmt);
3329 tree fntype, fndecl;
3330 unsigned i;
3332 if (gimple_call_internal_p (stmt))
3334 if (fn)
3336 error ("gimple call has two targets");
3337 debug_generic_stmt (fn);
3338 return true;
3341 else
3343 if (!fn)
3345 error ("gimple call has no target");
3346 return true;
3350 if (fn && !is_gimple_call_addr (fn))
3352 error ("invalid function in gimple call");
3353 debug_generic_stmt (fn);
3354 return true;
3357 if (fn
3358 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3359 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3360 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3362 error ("non-function in gimple call");
3363 return true;
3366 fndecl = gimple_call_fndecl (stmt);
3367 if (fndecl
3368 && TREE_CODE (fndecl) == FUNCTION_DECL
3369 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3370 && !DECL_PURE_P (fndecl)
3371 && !TREE_READONLY (fndecl))
3373 error ("invalid pure const state for function");
3374 return true;
3377 tree lhs = gimple_call_lhs (stmt);
3378 if (lhs
3379 && (!is_gimple_reg (lhs)
3380 && (!is_gimple_lvalue (lhs)
3381 || verify_types_in_gimple_reference
3382 (TREE_CODE (lhs) == WITH_SIZE_EXPR
3383 ? TREE_OPERAND (lhs, 0) : lhs, true))))
3385 error ("invalid LHS in gimple call");
3386 return true;
3389 if (gimple_call_ctrl_altering_p (stmt)
3390 && gimple_call_noreturn_p (stmt)
3391 && should_remove_lhs_p (lhs))
3393 error ("LHS in %<noreturn%> call");
3394 return true;
3397 fntype = gimple_call_fntype (stmt);
3398 if (fntype
3399 && lhs
3400 && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (fntype))
3401 /* ??? At least C++ misses conversions at assignments from
3402 void * call results.
3403 For now simply allow arbitrary pointer type conversions. */
3404 && !(POINTER_TYPE_P (TREE_TYPE (lhs))
3405 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3407 error ("invalid conversion in gimple call");
3408 debug_generic_stmt (TREE_TYPE (lhs));
3409 debug_generic_stmt (TREE_TYPE (fntype));
3410 return true;
3413 if (gimple_call_chain (stmt)
3414 && !is_gimple_val (gimple_call_chain (stmt)))
3416 error ("invalid static chain in gimple call");
3417 debug_generic_stmt (gimple_call_chain (stmt));
3418 return true;
3421 /* If there is a static chain argument, the call should either be
3422 indirect, or the decl should have DECL_STATIC_CHAIN set. */
3423 if (gimple_call_chain (stmt)
3424 && fndecl
3425 && !DECL_STATIC_CHAIN (fndecl))
3427 error ("static chain with function that doesn%'t use one");
3428 return true;
3431 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
3433 switch (DECL_FUNCTION_CODE (fndecl))
3435 case BUILT_IN_UNREACHABLE:
3436 case BUILT_IN_TRAP:
3437 if (gimple_call_num_args (stmt) > 0)
3439 /* Built-in unreachable with parameters might not be caught by
3440 undefined behavior sanitizer. Front-ends do check that users do not
3441 call them that way, but we also produce calls to
3442 __builtin_unreachable internally, for example when IPA figures
3443 out a call cannot happen in a legal program. In such cases,
3444 we must make sure arguments are stripped off. */
3445 error ("%<__builtin_unreachable%> or %<__builtin_trap%> call "
3446 "with arguments");
3447 return true;
3449 break;
3450 default:
3451 break;
3455 /* For a call to .DEFERRED_INIT,
3456 LHS = DEFERRED_INIT (SIZE of the DECL, INIT_TYPE, IS_VLA)
3457 we should guarantee that the 1st and the 3rd arguments are consistent:
3458 1st argument: SIZE of the DECL;
3459 3rd argument: IS_VLA, 0 NO, 1 YES;
3461 if IS_VLA is false, the 1st argument should be a constant and the same as
3462 the size of the LHS. */
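/* As an illustration (sizes hypothetical): for a non-VLA int of
   4 bytes, a well-formed call is x = .DEFERRED_INIT (4, INIT_TYPE, 0),
   where argument 0 equals TYPE_SIZE_UNIT of the LHS and the last
   argument is the IS_VLA flag. */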
3463 if (gimple_call_internal_p (stmt, IFN_DEFERRED_INIT))
3465 tree size_of_arg0 = gimple_call_arg (stmt, 0);
3466 tree size_of_lhs = TYPE_SIZE_UNIT (TREE_TYPE (lhs));
3467 tree is_vla_node = gimple_call_arg (stmt, 2);
3468 bool is_vla = (bool) TREE_INT_CST_LOW (is_vla_node);
3470 if (TREE_CODE (lhs) == SSA_NAME)
3471 lhs = SSA_NAME_VAR (lhs);
3473 poly_uint64 size_from_arg0, size_from_lhs;
3474 bool is_constant_size_arg0 = poly_int_tree_p (size_of_arg0,
3475 &size_from_arg0);
3476 bool is_constant_size_lhs = poly_int_tree_p (size_of_lhs,
3477 &size_from_lhs);
3478 if (!is_vla)
3480 if (!is_constant_size_arg0)
3482 error ("%<DEFERRED_INIT%> calls for non-VLA should have "
3483 "constant size for the first argument");
3484 return true;
3486 else if (!is_constant_size_lhs)
3488 error ("%<DEFERRED_INIT%> calls for non-VLA should have "
3489 "constant size for the LHS");
3490 return true;
3492 else if (maybe_ne (size_from_arg0, size_from_lhs))
3494 error ("%<DEFERRED_INIT%> calls for non-VLA should have same "
3495 "constant size for the first argument and LHS");
3496 return true;
3501 /* ??? The C frontend passes unpromoted arguments in case it
3502 didn't see a function declaration before the call. So for now
3503 leave the call arguments mostly unverified. Once we gimplify
3504 unit-at-a-time we have a chance to fix this. */
3505 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3507 tree arg = gimple_call_arg (stmt, i);
3508 if ((is_gimple_reg_type (TREE_TYPE (arg))
3509 && !is_gimple_val (arg))
3510 || (!is_gimple_reg_type (TREE_TYPE (arg))
3511 && !is_gimple_lvalue (arg)))
3513 error ("invalid argument to gimple call");
3514 debug_generic_expr (arg);
3515 return true;
3517 if (!is_gimple_reg (arg))
3519 if (TREE_CODE (arg) == WITH_SIZE_EXPR)
3520 arg = TREE_OPERAND (arg, 0);
3521 if (verify_types_in_gimple_reference (arg, false))
3522 return true;
3526 return false;
3529 /* Verifies the gimple comparison with the result type TYPE and
3530 the operands OP0 and OP1; the comparison code is CODE. */
3532 static bool
3533 verify_gimple_comparison (tree type, tree op0, tree op1, enum tree_code code)
3535 tree op0_type = TREE_TYPE (op0);
3536 tree op1_type = TREE_TYPE (op1);
3538 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3540 error ("invalid operands in gimple comparison");
3541 return true;
3544 /* For comparisons we do not have the operation's type as the
3545 effective type the comparison is carried out in. Instead
3546 we require that either the first operand is trivially
3547 convertible into the second, or the other way around. */
3548 if (!useless_type_conversion_p (op0_type, op1_type)
3549 && !useless_type_conversion_p (op1_type, op0_type))
3551 error ("mismatching comparison operand types");
3552 debug_generic_expr (op0_type);
3553 debug_generic_expr (op1_type);
3554 return true;
3557 /* The resulting type of a comparison may be an effective boolean type. */
3558 if (INTEGRAL_TYPE_P (type)
3559 && (TREE_CODE (type) == BOOLEAN_TYPE
3560 || TYPE_PRECISION (type) == 1))
3562 if ((TREE_CODE (op0_type) == VECTOR_TYPE
3563 || TREE_CODE (op1_type) == VECTOR_TYPE)
3564 && code != EQ_EXPR && code != NE_EXPR
3565 && !VECTOR_BOOLEAN_TYPE_P (op0_type)
3566 && !VECTOR_INTEGER_TYPE_P (op0_type))
3568 error ("unsupported operation or type for vector comparison"
3569 " returning a boolean");
3570 debug_generic_expr (op0_type);
3571 debug_generic_expr (op1_type);
3572 return true;
3575 /* Or a boolean vector type with the same element count
3576 as the comparison operand types. */
3577 else if (TREE_CODE (type) == VECTOR_TYPE
3578 && TREE_CODE (TREE_TYPE (type)) == BOOLEAN_TYPE)
3580 if (TREE_CODE (op0_type) != VECTOR_TYPE
3581 || TREE_CODE (op1_type) != VECTOR_TYPE)
3583 error ("non-vector operands in vector comparison");
3584 debug_generic_expr (op0_type);
3585 debug_generic_expr (op1_type);
3586 return true;
3589 if (maybe_ne (TYPE_VECTOR_SUBPARTS (type),
3590 TYPE_VECTOR_SUBPARTS (op0_type)))
3592 error ("invalid vector comparison resulting type");
3593 debug_generic_expr (type);
3594 return true;
3597 else
3599 error ("bogus comparison result type");
3600 debug_generic_expr (type);
3601 return true;
3604 return false;
3607 /* Verify a gimple assignment statement STMT with an unary rhs.
3608 Returns true if anything is wrong. */
3610 static bool
3611 verify_gimple_assign_unary (gassign *stmt)
3613 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3614 tree lhs = gimple_assign_lhs (stmt);
3615 tree lhs_type = TREE_TYPE (lhs);
3616 tree rhs1 = gimple_assign_rhs1 (stmt);
3617 tree rhs1_type = TREE_TYPE (rhs1);
3619 if (!is_gimple_reg (lhs))
3621 error ("non-register as LHS of unary operation");
3622 return true;
3625 if (!is_gimple_val (rhs1))
3627 error ("invalid operand in unary operation");
3628 return true;
3631 const char* const code_name = get_tree_code_name (rhs_code);
3633 /* First handle conversions. */
3634 switch (rhs_code)
3636 CASE_CONVERT:
3638 /* Allow conversions between vectors with the same number of elements,
3639 provided that the conversion is OK for the element types too. */
3640 if (VECTOR_TYPE_P (lhs_type)
3641 && VECTOR_TYPE_P (rhs1_type)
3642 && known_eq (TYPE_VECTOR_SUBPARTS (lhs_type),
3643 TYPE_VECTOR_SUBPARTS (rhs1_type)))
3645 lhs_type = TREE_TYPE (lhs_type);
3646 rhs1_type = TREE_TYPE (rhs1_type);
3648 else if (VECTOR_TYPE_P (lhs_type) || VECTOR_TYPE_P (rhs1_type))
3650 error ("invalid vector types in nop conversion");
3651 debug_generic_expr (lhs_type);
3652 debug_generic_expr (rhs1_type);
3653 return true;
3656 /* Allow conversions from pointer type to integral type only if
3657 there is no sign or zero extension involved.
3658 For targets where the precision of ptrofftype doesn't match that
3659 of pointers we allow conversions to types where
3660 POINTERS_EXTEND_UNSIGNED specifies how that works. */
3661 if ((POINTER_TYPE_P (lhs_type)
3662 && INTEGRAL_TYPE_P (rhs1_type))
3663 || (POINTER_TYPE_P (rhs1_type)
3664 && INTEGRAL_TYPE_P (lhs_type)
3665 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3666 #if defined(POINTERS_EXTEND_UNSIGNED)
3667 || (TYPE_MODE (rhs1_type) == ptr_mode
3668 && (TYPE_PRECISION (lhs_type)
3669 == BITS_PER_WORD /* word_mode */
3670 || (TYPE_PRECISION (lhs_type)
3671 == GET_MODE_PRECISION (Pmode))))
3672 #endif
3674 return false;
3676 /* Allow conversion from integral to offset type and vice versa. */
3677 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3678 && INTEGRAL_TYPE_P (rhs1_type))
3679 || (INTEGRAL_TYPE_P (lhs_type)
3680 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3681 return false;
3683 /* Otherwise assert we are converting between types of the
3684 same kind. */
3685 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3687 error ("invalid types in nop conversion");
3688 debug_generic_expr (lhs_type);
3689 debug_generic_expr (rhs1_type);
3690 return true;
3693 return false;
3696 case ADDR_SPACE_CONVERT_EXPR:
3698 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3699 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3700 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3702 error ("invalid types in address space conversion");
3703 debug_generic_expr (lhs_type);
3704 debug_generic_expr (rhs1_type);
3705 return true;
3708 return false;
3711 case FIXED_CONVERT_EXPR:
3713 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3714 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3716 error ("invalid types in fixed-point conversion");
3717 debug_generic_expr (lhs_type);
3718 debug_generic_expr (rhs1_type);
3719 return true;
3722 return false;
3725 case FLOAT_EXPR:
3727 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3728 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3729 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3731 error ("invalid types in conversion to floating-point");
3732 debug_generic_expr (lhs_type);
3733 debug_generic_expr (rhs1_type);
3734 return true;
3737 return false;
3740 case FIX_TRUNC_EXPR:
3742 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3743 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3744 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3746 error ("invalid types in conversion to integer");
3747 debug_generic_expr (lhs_type);
3748 debug_generic_expr (rhs1_type);
3749 return true;
3752 return false;
3755 case VEC_UNPACK_HI_EXPR:
3756 case VEC_UNPACK_LO_EXPR:
3757 case VEC_UNPACK_FLOAT_HI_EXPR:
3758 case VEC_UNPACK_FLOAT_LO_EXPR:
3759 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
3760 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
3761 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3762 || TREE_CODE (lhs_type) != VECTOR_TYPE
3763 || (!INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3764 && !SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type)))
3765 || (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3766 && !SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
3767 || ((rhs_code == VEC_UNPACK_HI_EXPR
3768 || rhs_code == VEC_UNPACK_LO_EXPR)
3769 && (INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3770 != INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3771 || ((rhs_code == VEC_UNPACK_FLOAT_HI_EXPR
3772 || rhs_code == VEC_UNPACK_FLOAT_LO_EXPR)
3773 && (INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3774 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))))
3775 || ((rhs_code == VEC_UNPACK_FIX_TRUNC_HI_EXPR
3776 || rhs_code == VEC_UNPACK_FIX_TRUNC_LO_EXPR)
3777 && (INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3778 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type))))
3779 || (maybe_ne (GET_MODE_SIZE (element_mode (lhs_type)),
3780 2 * GET_MODE_SIZE (element_mode (rhs1_type)))
3781 && (!VECTOR_BOOLEAN_TYPE_P (lhs_type)
3782 || !VECTOR_BOOLEAN_TYPE_P (rhs1_type)))
3783 || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (lhs_type),
3784 TYPE_VECTOR_SUBPARTS (rhs1_type)))
3786 error ("type mismatch in %qs expression", code_name);
3787 debug_generic_expr (lhs_type);
3788 debug_generic_expr (rhs1_type);
3789 return true;
3792 return false;
3794 case NEGATE_EXPR:
3795 case ABS_EXPR:
3796 case BIT_NOT_EXPR:
3797 case PAREN_EXPR:
3798 case CONJ_EXPR:
3799 /* Disallow pointer and offset types for many of the unary gimple codes. */
3800 if (POINTER_TYPE_P (lhs_type)
3801 || TREE_CODE (lhs_type) == OFFSET_TYPE)
3803 error ("invalid types for %qs", code_name);
3804 debug_generic_expr (lhs_type);
3805 debug_generic_expr (rhs1_type);
3806 return true;
3808 break;
3810 case ABSU_EXPR:
3811 if (!ANY_INTEGRAL_TYPE_P (lhs_type)
3812 || !TYPE_UNSIGNED (lhs_type)
3813 || !ANY_INTEGRAL_TYPE_P (rhs1_type)
3814 || TYPE_UNSIGNED (rhs1_type)
3815 || element_precision (lhs_type) != element_precision (rhs1_type))
3817 error ("invalid types for %qs", code_name);
3818 debug_generic_expr (lhs_type);
3819 debug_generic_expr (rhs1_type);
3820 return true;
3822 return false;
3824 case VEC_DUPLICATE_EXPR:
3825 if (TREE_CODE (lhs_type) != VECTOR_TYPE
3826 || !useless_type_conversion_p (TREE_TYPE (lhs_type), rhs1_type))
3828 error ("%qs should be from a scalar to a like vector", code_name);
3829 debug_generic_expr (lhs_type);
3830 debug_generic_expr (rhs1_type);
3831 return true;
3833 return false;
3835 default:
3836 gcc_unreachable ();
3839 /* For the remaining codes assert there is no conversion involved. */
3840 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3842 error ("non-trivial conversion in unary operation");
3843 debug_generic_expr (lhs_type);
3844 debug_generic_expr (rhs1_type);
3845 return true;
3848 return false;
3851 /* Verify a gimple assignment statement STMT with a binary rhs.
3852 Returns true if anything is wrong. */
static bool
verify_gimple_assign_binary (gassign *stmt)
{
  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
  tree lhs = gimple_assign_lhs (stmt);
  tree lhs_type = TREE_TYPE (lhs);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs1_type = TREE_TYPE (rhs1);
  tree rhs2 = gimple_assign_rhs2 (stmt);
  tree rhs2_type = TREE_TYPE (rhs2);

  if (!is_gimple_reg (lhs))
    {
      error ("non-register as LHS of binary operation");
      return true;
    }

  if (!is_gimple_val (rhs1)
      || !is_gimple_val (rhs2))
    {
      error ("invalid operands in binary operation");
      return true;
    }

  const char* const code_name = get_tree_code_name (rhs_code);

  /* First handle operations that involve different types.  */
  switch (rhs_code)
    {
    case COMPLEX_EXPR:
      {
	if (TREE_CODE (lhs_type) != COMPLEX_TYPE
	    || !(INTEGRAL_TYPE_P (rhs1_type)
		 || SCALAR_FLOAT_TYPE_P (rhs1_type))
	    || !(INTEGRAL_TYPE_P (rhs2_type)
		 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
	  {
	    error ("type mismatch in %qs", code_name);
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    debug_generic_expr (rhs2_type);
	    return true;
	  }

	return false;
      }

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      {
	/* Shifts and rotates are ok on integral types, fixed point
	   types and integer vector types.  */
	if ((!INTEGRAL_TYPE_P (rhs1_type)
	     && !FIXED_POINT_TYPE_P (rhs1_type)
	     && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
		  && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
	    || (!INTEGRAL_TYPE_P (rhs2_type)
		/* Vector shifts of vectors are also ok.  */
		&& !(TREE_CODE (rhs1_type) == VECTOR_TYPE
		     && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
		     && TREE_CODE (rhs2_type) == VECTOR_TYPE
		     && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
	    || !useless_type_conversion_p (lhs_type, rhs1_type))
	  {
	    error ("type mismatch in %qs", code_name);
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    debug_generic_expr (rhs2_type);
	    return true;
	  }

	return false;
      }

    case WIDEN_LSHIFT_EXPR:
      {
	if (!INTEGRAL_TYPE_P (lhs_type)
	    || !INTEGRAL_TYPE_P (rhs1_type)
	    || TREE_CODE (rhs2) != INTEGER_CST
	    || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
	  {
	    error ("type mismatch in %qs", code_name);
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    debug_generic_expr (rhs2_type);
	    return true;
	  }

	return false;
      }

    case VEC_WIDEN_LSHIFT_HI_EXPR:
    case VEC_WIDEN_LSHIFT_LO_EXPR:
      {
	if (TREE_CODE (rhs1_type) != VECTOR_TYPE
	    || TREE_CODE (lhs_type) != VECTOR_TYPE
	    || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
	    || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
	    || TREE_CODE (rhs2) != INTEGER_CST
	    || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
		> TYPE_PRECISION (TREE_TYPE (lhs_type))))
	  {
	    error ("type mismatch in %qs", code_name);
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    debug_generic_expr (rhs2_type);
	    return true;
	  }

	return false;
      }

    case WIDEN_PLUS_EXPR:
    case WIDEN_MINUS_EXPR:
    case PLUS_EXPR:
    case MINUS_EXPR:
      {
	tree lhs_etype = lhs_type;
	tree rhs1_etype = rhs1_type;
	tree rhs2_etype = rhs2_type;
	if (TREE_CODE (lhs_type) == VECTOR_TYPE)
	  {
	    if (TREE_CODE (rhs1_type) != VECTOR_TYPE
		|| TREE_CODE (rhs2_type) != VECTOR_TYPE)
	      {
		error ("invalid non-vector operands to %qs", code_name);
		return true;
	      }
	    lhs_etype = TREE_TYPE (lhs_type);
	    rhs1_etype = TREE_TYPE (rhs1_type);
	    rhs2_etype = TREE_TYPE (rhs2_type);
	  }
	if (POINTER_TYPE_P (lhs_etype)
	    || POINTER_TYPE_P (rhs1_etype)
	    || POINTER_TYPE_P (rhs2_etype))
	  {
	    error ("invalid (pointer) operands to %qs", code_name);
	    return true;
	  }

	/* Continue with generic binary expression handling.  */
	break;
      }

    case POINTER_PLUS_EXPR:
      {
	if (!POINTER_TYPE_P (rhs1_type)
	    || !useless_type_conversion_p (lhs_type, rhs1_type)
	    || !ptrofftype_p (rhs2_type))
	  {
	    error ("type mismatch in %qs", code_name);
	    debug_generic_stmt (lhs_type);
	    debug_generic_stmt (rhs1_type);
	    debug_generic_stmt (rhs2_type);
	    return true;
	  }

	return false;
      }

    case POINTER_DIFF_EXPR:
      {
	if (!POINTER_TYPE_P (rhs1_type)
	    || !POINTER_TYPE_P (rhs2_type)
	    /* Because we special-case pointers to void we allow difference
	       of arbitrary pointers with the same mode.  */
	    || TYPE_MODE (rhs1_type) != TYPE_MODE (rhs2_type)
	    || !INTEGRAL_TYPE_P (lhs_type)
	    || TYPE_UNSIGNED (lhs_type)
	    || TYPE_PRECISION (lhs_type) != TYPE_PRECISION (rhs1_type))
	  {
	    error ("type mismatch in %qs", code_name);
	    debug_generic_stmt (lhs_type);
	    debug_generic_stmt (rhs1_type);
	    debug_generic_stmt (rhs2_type);
	    return true;
	  }

	return false;
      }

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:

      gcc_unreachable ();

    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      /* Comparisons are also binary, but the result type is not
	 connected to the operand types.  */
      return verify_gimple_comparison (lhs_type, rhs1, rhs2, rhs_code);

    case WIDEN_MULT_EXPR:
      if (TREE_CODE (lhs_type) != INTEGER_TYPE)
	return true;
      return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
	      || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));

    case WIDEN_SUM_EXPR:
      {
	if (((TREE_CODE (rhs1_type) != VECTOR_TYPE
	      || TREE_CODE (lhs_type) != VECTOR_TYPE)
	     && ((!INTEGRAL_TYPE_P (rhs1_type)
		  && !SCALAR_FLOAT_TYPE_P (rhs1_type))
		 || (!INTEGRAL_TYPE_P (lhs_type)
		     && !SCALAR_FLOAT_TYPE_P (lhs_type))))
	    || !useless_type_conversion_p (lhs_type, rhs2_type)
	    || maybe_lt (GET_MODE_SIZE (element_mode (rhs2_type)),
			 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
	  {
	    error ("type mismatch in %qs", code_name);
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    debug_generic_expr (rhs2_type);
	    return true;
	  }
	return false;
      }

    case VEC_WIDEN_MINUS_HI_EXPR:
    case VEC_WIDEN_MINUS_LO_EXPR:
    case VEC_WIDEN_PLUS_HI_EXPR:
    case VEC_WIDEN_PLUS_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
	if (TREE_CODE (rhs1_type) != VECTOR_TYPE
	    || TREE_CODE (lhs_type) != VECTOR_TYPE
	    || !types_compatible_p (rhs1_type, rhs2_type)
	    || maybe_ne (GET_MODE_SIZE (element_mode (lhs_type)),
			 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
	  {
	    error ("type mismatch in %qs", code_name);
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    debug_generic_expr (rhs2_type);
	    return true;
	  }
	return false;
      }

    case VEC_PACK_TRUNC_EXPR:
      /* ??? We currently use VEC_PACK_TRUNC_EXPR to simply concat
	 vector boolean types.  */
      if (VECTOR_BOOLEAN_TYPE_P (lhs_type)
	  && VECTOR_BOOLEAN_TYPE_P (rhs1_type)
	  && types_compatible_p (rhs1_type, rhs2_type)
	  && known_eq (TYPE_VECTOR_SUBPARTS (lhs_type),
		       2 * TYPE_VECTOR_SUBPARTS (rhs1_type)))
	return false;

      /* Fallthru.  */
    case VEC_PACK_SAT_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      {
	if (TREE_CODE (rhs1_type) != VECTOR_TYPE
	    || TREE_CODE (lhs_type) != VECTOR_TYPE
	    || !((rhs_code == VEC_PACK_FIX_TRUNC_EXPR
		  && SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))
		  && INTEGRAL_TYPE_P (TREE_TYPE (lhs_type)))
		 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
		     == INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))))
	    || !types_compatible_p (rhs1_type, rhs2_type)
	    || maybe_ne (GET_MODE_SIZE (element_mode (rhs1_type)),
			 2 * GET_MODE_SIZE (element_mode (lhs_type)))
	    || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (rhs1_type),
			 TYPE_VECTOR_SUBPARTS (lhs_type)))
	  {
	    error ("type mismatch in %qs", code_name);
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    debug_generic_expr (rhs2_type);
	    return true;
	  }

	return false;
      }

    case VEC_PACK_FLOAT_EXPR:
      if (TREE_CODE (rhs1_type) != VECTOR_TYPE
	  || TREE_CODE (lhs_type) != VECTOR_TYPE
	  || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
	  || !SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type))
	  || !types_compatible_p (rhs1_type, rhs2_type)
	  || maybe_ne (GET_MODE_SIZE (element_mode (rhs1_type)),
		       2 * GET_MODE_SIZE (element_mode (lhs_type)))
	  || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (rhs1_type),
		       TYPE_VECTOR_SUBPARTS (lhs_type)))
	{
	  error ("type mismatch in %qs", code_name);
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs1_type);
	  debug_generic_expr (rhs2_type);
	  return true;
	}

      return false;

    case MULT_EXPR:
    case MULT_HIGHPART_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
      /* Disallow pointer and offset types for many of the binary gimple
	 operations.  */
      if (POINTER_TYPE_P (lhs_type)
	  || TREE_CODE (lhs_type) == OFFSET_TYPE)
	{
	  error ("invalid types for %qs", code_name);
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs1_type);
	  debug_generic_expr (rhs2_type);
	  return true;
	}
      /* Continue with generic binary expression handling.  */
      break;

    case MIN_EXPR:
    case MAX_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
      /* Continue with generic binary expression handling.  */
      break;

    case VEC_SERIES_EXPR:
      if (!useless_type_conversion_p (rhs1_type, rhs2_type))
	{
	  error ("type mismatch in %qs", code_name);
	  debug_generic_expr (rhs1_type);
	  debug_generic_expr (rhs2_type);
	  return true;
	}
      if (TREE_CODE (lhs_type) != VECTOR_TYPE
	  || !useless_type_conversion_p (TREE_TYPE (lhs_type), rhs1_type))
	{
	  error ("vector type expected in %qs", code_name);
	  debug_generic_expr (lhs_type);
	  return true;
	}
      return false;

    default:
      gcc_unreachable ();
    }

  if (!useless_type_conversion_p (lhs_type, rhs1_type)
      || !useless_type_conversion_p (lhs_type, rhs2_type))
    {
      error ("type mismatch in binary expression");
      debug_generic_stmt (lhs_type);
      debug_generic_stmt (rhs1_type);
      debug_generic_stmt (rhs2_type);
      return true;
    }

  return false;
}

/* Verify a gimple assignment statement STMT with a ternary rhs.
   Returns true if anything is wrong.  */

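/* For example, a ternary rhs such as

     x_4 = p_1 ? a_2 : b_3;

   is accepted when a_2 and b_3 trivially convert to the type of x_4;
   the cases below add the per-code constraints, e.g. matching vector
   element counts for VEC_PERM_EXPR or an in-range position for
   BIT_INSERT_EXPR.  (Illustrative GIMPLE, not from any testcase.)  */
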
static bool
verify_gimple_assign_ternary (gassign *stmt)
{
  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
  tree lhs = gimple_assign_lhs (stmt);
  tree lhs_type = TREE_TYPE (lhs);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs1_type = TREE_TYPE (rhs1);
  tree rhs2 = gimple_assign_rhs2 (stmt);
  tree rhs2_type = TREE_TYPE (rhs2);
  tree rhs3 = gimple_assign_rhs3 (stmt);
  tree rhs3_type = TREE_TYPE (rhs3);

  if (!is_gimple_reg (lhs))
    {
      error ("non-register as LHS of ternary operation");
      return true;
    }

  if ((rhs_code == COND_EXPR
       ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
      || !is_gimple_val (rhs2)
      || !is_gimple_val (rhs3))
    {
      error ("invalid operands in ternary operation");
      return true;
    }

  const char* const code_name = get_tree_code_name (rhs_code);

  /* First handle operations that involve different types.  */
  switch (rhs_code)
    {
    case WIDEN_MULT_PLUS_EXPR:
    case WIDEN_MULT_MINUS_EXPR:
      if ((!INTEGRAL_TYPE_P (rhs1_type)
	   && !FIXED_POINT_TYPE_P (rhs1_type))
	  || !useless_type_conversion_p (rhs1_type, rhs2_type)
	  || !useless_type_conversion_p (lhs_type, rhs3_type)
	  || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
	  || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
	{
	  error ("type mismatch in %qs", code_name);
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs1_type);
	  debug_generic_expr (rhs2_type);
	  debug_generic_expr (rhs3_type);
	  return true;
	}
      break;

    case VEC_COND_EXPR:
      if (!VECTOR_BOOLEAN_TYPE_P (rhs1_type)
	  || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs1_type),
		       TYPE_VECTOR_SUBPARTS (lhs_type)))
	{
	  error ("the first argument of a %qs must be of a "
		 "boolean vector type of the same number of elements "
		 "as the result", code_name);
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs1_type);
	  return true;
	}
      if (!is_gimple_val (rhs1))
	return true;
      /* Fallthrough.  */
    case COND_EXPR:
      if (!is_gimple_val (rhs1)
	  && verify_gimple_comparison (TREE_TYPE (rhs1),
				       TREE_OPERAND (rhs1, 0),
				       TREE_OPERAND (rhs1, 1),
				       TREE_CODE (rhs1)))
	return true;
      if (!useless_type_conversion_p (lhs_type, rhs2_type)
	  || !useless_type_conversion_p (lhs_type, rhs3_type))
	{
	  error ("type mismatch in %qs", code_name);
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs2_type);
	  debug_generic_expr (rhs3_type);
	  return true;
	}
      break;

    case VEC_PERM_EXPR:
      if (!useless_type_conversion_p (lhs_type, rhs1_type)
	  || !useless_type_conversion_p (lhs_type, rhs2_type))
	{
	  error ("type mismatch in %qs", code_name);
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs1_type);
	  debug_generic_expr (rhs2_type);
	  debug_generic_expr (rhs3_type);
	  return true;
	}

      if (TREE_CODE (rhs1_type) != VECTOR_TYPE
	  || TREE_CODE (rhs2_type) != VECTOR_TYPE
	  || TREE_CODE (rhs3_type) != VECTOR_TYPE)
	{
	  error ("vector types expected in %qs", code_name);
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs1_type);
	  debug_generic_expr (rhs2_type);
	  debug_generic_expr (rhs3_type);
	  return true;
	}

      if (maybe_ne (TYPE_VECTOR_SUBPARTS (rhs1_type),
		    TYPE_VECTOR_SUBPARTS (rhs2_type))
	  || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs2_type),
		       TYPE_VECTOR_SUBPARTS (rhs3_type))
	  || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs3_type),
		       TYPE_VECTOR_SUBPARTS (lhs_type)))
	{
	  error ("vectors with different element numbers found in %qs",
		 code_name);
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs1_type);
	  debug_generic_expr (rhs2_type);
	  debug_generic_expr (rhs3_type);
	  return true;
	}

      if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
	  || (TREE_CODE (rhs3) != VECTOR_CST
	      && (GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE
				    (TREE_TYPE (rhs3_type)))
		  != GET_MODE_BITSIZE (SCALAR_TYPE_MODE
				       (TREE_TYPE (rhs1_type))))))
	{
	  error ("invalid mask type in %qs", code_name);
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs1_type);
	  debug_generic_expr (rhs2_type);
	  debug_generic_expr (rhs3_type);
	  return true;
	}

      return false;

    case SAD_EXPR:
      if (!useless_type_conversion_p (rhs1_type, rhs2_type)
	  || !useless_type_conversion_p (lhs_type, rhs3_type)
	  || 2 * GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type)))
	     > GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (lhs_type))))
	{
	  error ("type mismatch in %qs", code_name);
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs1_type);
	  debug_generic_expr (rhs2_type);
	  debug_generic_expr (rhs3_type);
	  return true;
	}

      if (TREE_CODE (rhs1_type) != VECTOR_TYPE
	  || TREE_CODE (rhs2_type) != VECTOR_TYPE
	  || TREE_CODE (rhs3_type) != VECTOR_TYPE)
	{
	  error ("vector types expected in %qs", code_name);
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs1_type);
	  debug_generic_expr (rhs2_type);
	  debug_generic_expr (rhs3_type);
	  return true;
	}

      return false;

    case BIT_INSERT_EXPR:
      if (! useless_type_conversion_p (lhs_type, rhs1_type))
	{
	  error ("type mismatch in %qs", code_name);
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs1_type);
	  return true;
	}
      if (! ((INTEGRAL_TYPE_P (rhs1_type)
	      && INTEGRAL_TYPE_P (rhs2_type))
	     /* Vector element insert.  */
	     || (VECTOR_TYPE_P (rhs1_type)
		 && types_compatible_p (TREE_TYPE (rhs1_type), rhs2_type))
	     /* Aligned sub-vector insert.  */
	     || (VECTOR_TYPE_P (rhs1_type)
		 && VECTOR_TYPE_P (rhs2_type)
		 && types_compatible_p (TREE_TYPE (rhs1_type),
					TREE_TYPE (rhs2_type))
		 && multiple_p (TYPE_VECTOR_SUBPARTS (rhs1_type),
				TYPE_VECTOR_SUBPARTS (rhs2_type))
		 && multiple_of_p (bitsizetype, rhs3, TYPE_SIZE (rhs2_type)))))
	{
	  error ("not allowed type combination in %qs", code_name);
	  debug_generic_expr (rhs1_type);
	  debug_generic_expr (rhs2_type);
	  return true;
	}
      if (! tree_fits_uhwi_p (rhs3)
	  || ! types_compatible_p (bitsizetype, TREE_TYPE (rhs3))
	  || ! tree_fits_uhwi_p (TYPE_SIZE (rhs2_type)))
	{
	  error ("invalid position or size in %qs", code_name);
	  return true;
	}
      if (INTEGRAL_TYPE_P (rhs1_type)
	  && !type_has_mode_precision_p (rhs1_type))
	{
	  error ("%qs into non-mode-precision operand", code_name);
	  return true;
	}
      if (INTEGRAL_TYPE_P (rhs1_type))
	{
	  unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
	  if (bitpos >= TYPE_PRECISION (rhs1_type)
	      || (bitpos + TYPE_PRECISION (rhs2_type)
		  > TYPE_PRECISION (rhs1_type)))
	    {
	      error ("insertion out of range in %qs", code_name);
	      return true;
	    }
	}
      else if (VECTOR_TYPE_P (rhs1_type))
	{
	  unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
	  unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (TYPE_SIZE (rhs2_type));
	  if (bitpos % bitsize != 0)
	    {
	      error ("%qs not at element boundary", code_name);
	      return true;
	    }
	}
      return false;

    case DOT_PROD_EXPR:
      {
	if (((TREE_CODE (rhs1_type) != VECTOR_TYPE
	      || TREE_CODE (lhs_type) != VECTOR_TYPE)
	     && ((!INTEGRAL_TYPE_P (rhs1_type)
		  && !SCALAR_FLOAT_TYPE_P (rhs1_type))
		 || (!INTEGRAL_TYPE_P (lhs_type)
		     && !SCALAR_FLOAT_TYPE_P (lhs_type))))
	    /* rhs1_type and rhs2_type may differ in sign.  */
	    || !tree_nop_conversion_p (rhs1_type, rhs2_type)
	    || !useless_type_conversion_p (lhs_type, rhs3_type)
	    || maybe_lt (GET_MODE_SIZE (element_mode (rhs3_type)),
			 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
	  {
	    error ("type mismatch in %qs", code_name);
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    debug_generic_expr (rhs2_type);
	    return true;
	  }
	return false;
      }

    case REALIGN_LOAD_EXPR:
      /* FIXME.  */
      return false;

    default:
      gcc_unreachable ();
    }
  return false;
}

/* Verify a gimple assignment statement STMT with a single rhs.
   Returns true if anything is wrong.  */

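/* For example, plain copies, loads and stores land here:

     x_2 = y_1;           <- register copy
     *p_3 = x_2;          <- store; the LHS is a MEM_REF
     x_4 = MEM[(int *)p_3];

   Besides the trivial-conversion check, the cases below mainly verify
   that references are internally consistent and that a store of a
   register-type value does not have another memory reference as its
   RHS.  (Illustrative GIMPLE, not from any testcase.)  */
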
static bool
verify_gimple_assign_single (gassign *stmt)
{
  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
  tree lhs = gimple_assign_lhs (stmt);
  tree lhs_type = TREE_TYPE (lhs);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs1_type = TREE_TYPE (rhs1);
  bool res = false;

  const char* const code_name = get_tree_code_name (rhs_code);

  if (!useless_type_conversion_p (lhs_type, rhs1_type))
    {
      error ("non-trivial conversion in %qs", code_name);
      debug_generic_expr (lhs_type);
      debug_generic_expr (rhs1_type);
      return true;
    }

  if (gimple_clobber_p (stmt)
      && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
    {
      error ("%qs LHS in clobber statement",
	     get_tree_code_name (TREE_CODE (lhs)));
      debug_generic_expr (lhs);
      return true;
    }

  if (TREE_CODE (lhs) == WITH_SIZE_EXPR)
    {
      error ("%qs LHS in assignment statement",
	     get_tree_code_name (TREE_CODE (lhs)));
      debug_generic_expr (lhs);
      return true;
    }

  if (handled_component_p (lhs)
      || TREE_CODE (lhs) == MEM_REF
      || TREE_CODE (lhs) == TARGET_MEM_REF)
    res |= verify_types_in_gimple_reference (lhs, true);

  /* Special codes we cannot handle via their class.  */
  switch (rhs_code)
    {
    case ADDR_EXPR:
      {
	tree op = TREE_OPERAND (rhs1, 0);
	if (!is_gimple_addressable (op))
	  {
	    error ("invalid operand in %qs", code_name);
	    return true;
	  }

	/* Technically there is no longer a need for matching types, but
	   gimple hygiene asks for this check.  In LTO we can end up
	   combining incompatible units and thus end up with addresses
	   of globals that change their type to a common one.  */
	if (!in_lto_p
	    && !types_compatible_p (TREE_TYPE (op),
				    TREE_TYPE (TREE_TYPE (rhs1)))
	    && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
							  TREE_TYPE (op)))
	  {
	    error ("type mismatch in %qs", code_name);
	    debug_generic_stmt (TREE_TYPE (rhs1));
	    debug_generic_stmt (TREE_TYPE (op));
	    return true;
	  }

	return (verify_address (rhs1, true)
		|| verify_types_in_gimple_reference (op, true));
      }

    /* tcc_reference */
    case INDIRECT_REF:
      error ("%qs in gimple IL", code_name);
      return true;

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case VIEW_CONVERT_EXPR:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case TARGET_MEM_REF:
    case MEM_REF:
      if (!is_gimple_reg (lhs)
	  && is_gimple_reg_type (TREE_TYPE (lhs)))
	{
	  error ("invalid RHS for gimple memory store: %qs", code_name);
	  debug_generic_stmt (lhs);
	  debug_generic_stmt (rhs1);
	  return true;
	}
      return res || verify_types_in_gimple_reference (rhs1, false);

    /* tcc_constant */
    case SSA_NAME:
    case INTEGER_CST:
    case REAL_CST:
    case FIXED_CST:
    case COMPLEX_CST:
    case VECTOR_CST:
    case STRING_CST:
      return res;

    /* tcc_declaration */
    case CONST_DECL:
      return res;
    case VAR_DECL:
    case PARM_DECL:
      if (!is_gimple_reg (lhs)
	  && !is_gimple_reg (rhs1)
	  && is_gimple_reg_type (TREE_TYPE (lhs)))
	{
	  error ("invalid RHS for gimple memory store: %qs", code_name);
	  debug_generic_stmt (lhs);
	  debug_generic_stmt (rhs1);
	  return true;
	}
      return res;

    case CONSTRUCTOR:
      if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
	{
	  unsigned int i;
	  tree elt_i, elt_v, elt_t = NULL_TREE;

	  if (CONSTRUCTOR_NELTS (rhs1) == 0)
	    return res;
	  /* For vector CONSTRUCTORs we require that either it is an empty
	     CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
	     (in which case the element count must be correct to cover the
	     whole outer vector and the index must be NULL on all elements),
	     or it is a CONSTRUCTOR of scalar elements, where we as an
	     exception allow a smaller number of elements (assuming zero
	     filling) and consecutive indexes as compared to NULL indexes
	     (such CONSTRUCTORs can appear in the IL from FEs).  */
	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
	    {
	      if (elt_t == NULL_TREE)
		{
		  elt_t = TREE_TYPE (elt_v);
		  if (TREE_CODE (elt_t) == VECTOR_TYPE)
		    {
		      tree elt_t = TREE_TYPE (elt_v);
		      if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
						      TREE_TYPE (elt_t)))
			{
			  error ("incorrect type of vector %qs elements",
				 code_name);
			  debug_generic_stmt (rhs1);
			  return true;
			}
		      else if (maybe_ne (CONSTRUCTOR_NELTS (rhs1)
					 * TYPE_VECTOR_SUBPARTS (elt_t),
					 TYPE_VECTOR_SUBPARTS (rhs1_type)))
			{
			  error ("incorrect number of vector %qs elements",
				 code_name);
			  debug_generic_stmt (rhs1);
			  return true;
			}
		    }
		  else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
						       elt_t))
		    {
		      error ("incorrect type of vector %qs elements",
			     code_name);
		      debug_generic_stmt (rhs1);
		      return true;
		    }
		  else if (maybe_gt (CONSTRUCTOR_NELTS (rhs1),
				     TYPE_VECTOR_SUBPARTS (rhs1_type)))
		    {
		      error ("incorrect number of vector %qs elements",
			     code_name);
		      debug_generic_stmt (rhs1);
		      return true;
		    }
		}
	      else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
		{
		  error ("incorrect type of vector CONSTRUCTOR elements");
		  debug_generic_stmt (rhs1);
		  return true;
		}
	      if (elt_i != NULL_TREE
		  && (TREE_CODE (elt_t) == VECTOR_TYPE
		      || TREE_CODE (elt_i) != INTEGER_CST
		      || compare_tree_int (elt_i, i) != 0))
		{
		  error ("vector %qs with non-NULL element index",
			 code_name);
		  debug_generic_stmt (rhs1);
		  return true;
		}
	      if (!is_gimple_val (elt_v))
		{
		  error ("vector %qs element is not a GIMPLE value",
			 code_name);
		  debug_generic_stmt (rhs1);
		  return true;
		}
	    }
	}
      else if (CONSTRUCTOR_NELTS (rhs1) != 0)
	{
	  error ("non-vector %qs with elements", code_name);
	  debug_generic_stmt (rhs1);
	  return true;
	}
      return res;

    case ASSERT_EXPR:
      /* FIXME.  */
      rhs1 = fold (ASSERT_EXPR_COND (rhs1));
      if (rhs1 == boolean_false_node)
	{
	  error ("%qs with an always-false condition", code_name);
	  debug_generic_stmt (rhs1);
	  return true;
	}
      break;

    case WITH_SIZE_EXPR:
      error ("%qs RHS in assignment statement",
	     get_tree_code_name (rhs_code));
      debug_generic_expr (rhs1);
      return true;

    case OBJ_TYPE_REF:
      /* FIXME.  */
      return res;

    default:;
    }

  return res;
}

/* Verify the contents of a GIMPLE_ASSIGN STMT.  Returns true when there
   is a problem, otherwise false.  */

static bool
verify_gimple_assign (gassign *stmt)
{
  switch (gimple_assign_rhs_class (stmt))
    {
    case GIMPLE_SINGLE_RHS:
      return verify_gimple_assign_single (stmt);

    case GIMPLE_UNARY_RHS:
      return verify_gimple_assign_unary (stmt);

    case GIMPLE_BINARY_RHS:
      return verify_gimple_assign_binary (stmt);

    case GIMPLE_TERNARY_RHS:
      return verify_gimple_assign_ternary (stmt);

    default:
      gcc_unreachable ();
    }
}

/* Verify the contents of a GIMPLE_RETURN STMT.  Returns true when there
   is a problem, otherwise false.  */

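/* For example,

     return x_1;

   is valid when the type of x_1 trivially converts to the function's
   result type; a plain "return;" is always accepted, since missing
   return values are not fixed up here.  */
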
static bool
verify_gimple_return (greturn *stmt)
{
  tree op = gimple_return_retval (stmt);
  tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));

  /* We cannot test for present return values as we do not fix up missing
     return values from the original source.  */
  if (op == NULL)
    return false;

  if (!is_gimple_val (op)
      && TREE_CODE (op) != RESULT_DECL)
    {
      error ("invalid operand in return statement");
      debug_generic_stmt (op);
      return true;
    }

  if ((TREE_CODE (op) == RESULT_DECL
       && DECL_BY_REFERENCE (op))
      || (TREE_CODE (op) == SSA_NAME
	  && SSA_NAME_VAR (op)
	  && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
	  && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
    op = TREE_TYPE (op);

  if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
    {
      error ("invalid conversion in return statement");
      debug_generic_stmt (restype);
      debug_generic_stmt (TREE_TYPE (op));
      return true;
    }

  return false;
}


/* Verify the contents of a GIMPLE_GOTO STMT.  Returns true when there
   is a problem, otherwise false.  */

static bool
verify_gimple_goto (ggoto *stmt)
{
  tree dest = gimple_goto_dest (stmt);

  /* ??? We have two canonical forms of direct goto destinations, a
     bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL.  */
  if (TREE_CODE (dest) != LABEL_DECL
      && (!is_gimple_val (dest)
	  || !POINTER_TYPE_P (TREE_TYPE (dest))))
    {
      error ("goto destination is neither a label nor a pointer");
      return true;
    }

  return false;
}

/* Verify the contents of a GIMPLE_SWITCH STMT.  Returns true when there
   is a problem, otherwise false.  */

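/* For example, the case vector of

     switch (i_1) <default: L0, case 1: L1, case 3 ... 7: L2>

   must keep the default label first and without a CASE_LOW, give every
   other label a CASE_LOW of the same type, and keep the labels sorted
   with non-overlapping ranges.  (Illustrative dump syntax.)  */
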
static bool
verify_gimple_switch (gswitch *stmt)
{
  unsigned int i, n;
  tree elt, prev_upper_bound = NULL_TREE;
  tree index_type, elt_type = NULL_TREE;

  if (!is_gimple_val (gimple_switch_index (stmt)))
    {
      error ("invalid operand to switch statement");
      debug_generic_stmt (gimple_switch_index (stmt));
      return true;
    }

  index_type = TREE_TYPE (gimple_switch_index (stmt));
  if (! INTEGRAL_TYPE_P (index_type))
    {
      error ("non-integral type switch statement");
      debug_generic_expr (index_type);
      return true;
    }

  elt = gimple_switch_label (stmt, 0);
  if (CASE_LOW (elt) != NULL_TREE
      || CASE_HIGH (elt) != NULL_TREE
      || CASE_CHAIN (elt) != NULL_TREE)
    {
      error ("invalid default case label in switch statement");
      debug_generic_expr (elt);
      return true;
    }

  n = gimple_switch_num_labels (stmt);
  for (i = 1; i < n; i++)
    {
      elt = gimple_switch_label (stmt, i);

      if (CASE_CHAIN (elt))
	{
	  error ("invalid %<CASE_CHAIN%>");
	  debug_generic_expr (elt);
	  return true;
	}
      if (! CASE_LOW (elt))
	{
	  error ("invalid case label in switch statement");
	  debug_generic_expr (elt);
	  return true;
	}
      if (CASE_HIGH (elt)
	  && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
	{
	  error ("invalid case range in switch statement");
	  debug_generic_expr (elt);
	  return true;
	}

      if (! elt_type)
	{
	  elt_type = TREE_TYPE (CASE_LOW (elt));
	  if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
	    {
	      error ("type precision mismatch in switch statement");
	      return true;
	    }
	}
      if (TREE_TYPE (CASE_LOW (elt)) != elt_type
	  || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
	{
	  error ("type mismatch for case label in switch statement");
	  debug_generic_expr (elt);
	  return true;
	}

      if (prev_upper_bound)
	{
	  if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
	    {
	      error ("case labels not sorted in switch statement");
	      return true;
	    }
	}

      prev_upper_bound = CASE_HIGH (elt);
      if (! prev_upper_bound)
	prev_upper_bound = CASE_LOW (elt);
    }

  return false;
}

/* Verify a gimple debug statement STMT.
   Returns true if anything is wrong.  */

static bool
verify_gimple_debug (gimple *stmt ATTRIBUTE_UNUSED)
{
  /* There isn't much that could be wrong in a gimple debug stmt.  A
     gimple debug bind stmt, for example, maps a tree, that's usually
     a VAR_DECL or a PARM_DECL, but that could also be some scalarized
     component or member of an aggregate type, to another tree, that
     can be an arbitrary expression.  These stmts expand into debug
     insns, and are converted to debug notes by var-tracking.c.  */
  return false;
}

/* Verify a gimple label statement STMT.
   Returns true if anything is wrong.  */

static bool
verify_gimple_label (glabel *stmt)
{
  tree decl = gimple_label_label (stmt);
  int uid;
  bool err = false;

  if (TREE_CODE (decl) != LABEL_DECL)
    return true;
  if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
      && DECL_CONTEXT (decl) != current_function_decl)
    {
      error ("label context is not the current function declaration");
      err |= true;
    }

  uid = LABEL_DECL_UID (decl);
  if (cfun->cfg
      && (uid == -1
	  || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
    {
      error ("incorrect entry in %<label_to_block_map%>");
      err |= true;
    }

  uid = EH_LANDING_PAD_NR (decl);
  if (uid)
    {
      eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
      if (decl != lp->post_landing_pad)
	{
	  error ("incorrect setting of landing pad number");
	  err |= true;
	}
    }

  return err;
}

/* Verify a gimple cond statement STMT.
   Returns true if anything is wrong.  */

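/* For example,

     if (a_1 < b_2) goto <bb 3>; else goto <bb 4>;

   must use a tcc_comparison code on the condition; the operand checks
   are shared with comparison assignments via verify_gimple_comparison.  */
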
static bool
verify_gimple_cond (gcond *stmt)
{
  if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
    {
      error ("invalid comparison code in gimple cond");
      return true;
    }
  if (!(!gimple_cond_true_label (stmt)
	|| TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
      || !(!gimple_cond_false_label (stmt)
	   || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
    {
      error ("invalid labels in gimple cond");
      return true;
    }

  return verify_gimple_comparison (boolean_type_node,
				   gimple_cond_lhs (stmt),
				   gimple_cond_rhs (stmt),
				   gimple_cond_code (stmt));
}

/* Verify the GIMPLE statement STMT.  Returns true if there is an
   error, otherwise false.  */

static bool
verify_gimple_stmt (gimple *stmt)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      return verify_gimple_assign (as_a <gassign *> (stmt));

    case GIMPLE_LABEL:
      return verify_gimple_label (as_a <glabel *> (stmt));

    case GIMPLE_CALL:
      return verify_gimple_call (as_a <gcall *> (stmt));

    case GIMPLE_COND:
      return verify_gimple_cond (as_a <gcond *> (stmt));

    case GIMPLE_GOTO:
      return verify_gimple_goto (as_a <ggoto *> (stmt));

    case GIMPLE_SWITCH:
      return verify_gimple_switch (as_a <gswitch *> (stmt));

    case GIMPLE_RETURN:
      return verify_gimple_return (as_a <greturn *> (stmt));

    case GIMPLE_ASM:
      return false;

    case GIMPLE_TRANSACTION:
      return verify_gimple_transaction (as_a <gtransaction *> (stmt));

    /* Tuples that do not have tree operands.  */
    case GIMPLE_NOP:
    case GIMPLE_PREDICT:
    case GIMPLE_RESX:
    case GIMPLE_EH_DISPATCH:
    case GIMPLE_EH_MUST_NOT_THROW:
      return false;

    CASE_GIMPLE_OMP:
      /* OpenMP directives are validated by the FE and never operated
	 on by the optimizers.  Furthermore, GIMPLE_OMP_FOR may contain
	 non-gimple expressions when the main index variable has had
	 its address taken.  This does not affect the loop itself
	 because the header of a GIMPLE_OMP_FOR is merely used to
	 determine how to set up the parallel iteration.  */
      return false;

    case GIMPLE_DEBUG:
      return verify_gimple_debug (stmt);

    default:
      gcc_unreachable ();
    }
}

/* Verify the contents of a GIMPLE_PHI.  Returns true if there is a problem,
   and false otherwise.  */

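/* For example, a well-formed PHI at a join point looks like

     # x_3 = PHI <x_1(2), x_2(4)>

   where each argument is a GIMPLE value and agrees with the result on
   being virtual; a virtual PHI must use the single virtual operand
   (gimple_vop) throughout.  (Illustrative dump syntax.)  */
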
static bool
verify_gimple_phi (gphi *phi)
{
  bool err = false;
  unsigned i;
  tree phi_result = gimple_phi_result (phi);
  bool virtual_p;

  if (!phi_result)
    {
      error ("invalid %<PHI%> result");
      return true;
    }

  virtual_p = virtual_operand_p (phi_result);
  if (TREE_CODE (phi_result) != SSA_NAME
      || (virtual_p
	  && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
    {
      error ("invalid %<PHI%> result");
      err = true;
    }

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree t = gimple_phi_arg_def (phi, i);

      if (!t)
	{
	  error ("missing %<PHI%> def");
	  err |= true;
	  continue;
	}
      /* Addressable variables do have SSA_NAMEs but they
	 are not considered gimple values.  */
      else if ((TREE_CODE (t) == SSA_NAME
		&& virtual_p != virtual_operand_p (t))
	       || (virtual_p
		   && (TREE_CODE (t) != SSA_NAME
		       || SSA_NAME_VAR (t) != gimple_vop (cfun)))
	       || (!virtual_p
		   && !is_gimple_val (t)))
	{
	  error ("invalid %<PHI%> argument");
	  debug_generic_expr (t);
	  err |= true;
	}
#ifdef ENABLE_TYPES_CHECKING
      if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
	{
	  error ("incompatible types in %<PHI%> argument %u", i);
	  debug_generic_stmt (TREE_TYPE (phi_result));
	  debug_generic_stmt (TREE_TYPE (t));
	  err |= true;
	}
#endif
    }

  return err;
}

/* Verify the GIMPLE statements inside the sequence STMTS.  */

static bool
verify_gimple_in_seq_2 (gimple_seq stmts)
{
  gimple_stmt_iterator ittr;
  bool err = false;

  for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
    {
      gimple *stmt = gsi_stmt (ittr);

      switch (gimple_code (stmt))
	{
	case GIMPLE_BIND:
	  err |= verify_gimple_in_seq_2 (
		   gimple_bind_body (as_a <gbind *> (stmt)));
	  break;

	case GIMPLE_TRY:
	  err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
	  err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
	  break;

	case GIMPLE_EH_FILTER:
	  err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
	  break;

	case GIMPLE_EH_ELSE:
	  {
	    geh_else *eh_else = as_a <geh_else *> (stmt);
	    err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else));
	    err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else));
	  }
	  break;

	case GIMPLE_CATCH:
	  err |= verify_gimple_in_seq_2 (gimple_catch_handler (
					   as_a <gcatch *> (stmt)));
	  break;

	case GIMPLE_TRANSACTION:
	  err |= verify_gimple_transaction (as_a <gtransaction *> (stmt));
	  break;

	default:
	  {
	    bool err2 = verify_gimple_stmt (stmt);
	    if (err2)
	      debug_gimple_stmt (stmt);
	    err |= err2;
	  }
	}
    }

  return err;
}

/* Verify the contents of a GIMPLE_TRANSACTION.  Returns true if there
   is a problem, otherwise false.  */

static bool
verify_gimple_transaction (gtransaction *stmt)
{
  tree lab;

  lab = gimple_transaction_label_norm (stmt);
  if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
    return true;
  lab = gimple_transaction_label_uninst (stmt);
  if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
    return true;
  lab = gimple_transaction_label_over (stmt);
  if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
    return true;

  return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
}


/* Verify the GIMPLE statements inside the statement list STMTS.  */

DEBUG_FUNCTION void
verify_gimple_in_seq (gimple_seq stmts)
{
  timevar_push (TV_TREE_STMT_VERIFY);
  if (verify_gimple_in_seq_2 (stmts))
    internal_error ("%<verify_gimple%> failed");
  timevar_pop (TV_TREE_STMT_VERIFY);
}

/* Return true when T can be shared.  */

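/* E.g. types, decls, SSA names, CASE_LABEL_EXPRs and invariants such
   as constants or the address of a global may be referenced from many
   statements, while ordinary expression nodes must stay unshared so
   each use can be rewritten independently.  */
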
static bool
tree_node_can_be_shared (tree t)
{
  if (IS_TYPE_OR_DECL_P (t)
      || TREE_CODE (t) == SSA_NAME
      || TREE_CODE (t) == IDENTIFIER_NODE
      || TREE_CODE (t) == CASE_LABEL_EXPR
      || is_gimple_min_invariant (t))
    return true;

  if (t == error_mark_node)
    return true;

  return false;
}

/* Called via walk_tree.  Verify tree sharing.  */

static tree
verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
{
  hash_set<void *> *visited = (hash_set<void *> *) data;

  if (tree_node_can_be_shared (*tp))
    {
      *walk_subtrees = false;
      return NULL;
    }

  if (visited->add (*tp))
    return *tp;

  return NULL;
}

/* Called via walk_gimple_stmt.  Verify tree sharing.  */

static tree
verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
}

static bool eh_error_found;
bool
verify_eh_throw_stmt_node (gimple *const &stmt, const int &,
			   hash_set<gimple *> *visited)
{
  if (!visited->contains (stmt))
    {
      error ("dead statement in EH table");
      debug_gimple_stmt (stmt);
      eh_error_found = true;
    }
  return true;
}

/* Verify that location LOC's block is in BLOCKS.  */

static bool
verify_location (hash_set<tree> *blocks, location_t loc)
{
  tree block = LOCATION_BLOCK (loc);
  if (block != NULL_TREE
      && !blocks->contains (block))
    {
      error ("location references block not in block tree");
      return true;
    }
  if (block != NULL_TREE)
    return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
  return false;
}

/* Called via walk_tree.  Verify that expressions have no blocks.  */

static tree
verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
{
  if (!EXPR_P (*tp))
    {
      *walk_subtrees = false;
      return NULL;
    }

  location_t loc = EXPR_LOCATION (*tp);
  if (LOCATION_BLOCK (loc) != NULL)
    return *tp;

  return NULL;
}

/* Called via walk_tree.  Verify locations of expressions.  */

static tree
verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
{
  hash_set<tree> *blocks = (hash_set<tree> *) data;
  tree t = *tp;

  /* ??? This doesn't really belong here but there's no good place to
     stick this remainder of old verify_expr.  */
  /* ??? This barfs on debug stmts which contain binds to vars with
     different function context.  */
#if 0
  if (VAR_P (t)
      || TREE_CODE (t) == PARM_DECL
      || TREE_CODE (t) == RESULT_DECL)
    {
      tree context = decl_function_context (t);
      if (context != cfun->decl
	  && !SCOPE_FILE_SCOPE_P (context)
	  && !TREE_STATIC (t)
	  && !DECL_EXTERNAL (t))
	{
	  error ("local declaration from a different function");
	  return t;
	}
    }
#endif

  if (VAR_P (t) && DECL_HAS_DEBUG_EXPR_P (t))
    {
      tree x = DECL_DEBUG_EXPR (t);
      tree addr = walk_tree (&x, verify_expr_no_block, NULL, NULL);
      if (addr)
	return addr;
    }
  if ((VAR_P (t)
       || TREE_CODE (t) == PARM_DECL
       || TREE_CODE (t) == RESULT_DECL)
      && DECL_HAS_VALUE_EXPR_P (t))
    {
      tree x = DECL_VALUE_EXPR (t);
      tree addr = walk_tree (&x, verify_expr_no_block, NULL, NULL);
      if (addr)
	return addr;
    }

  if (!EXPR_P (t))
    {
      *walk_subtrees = false;
      return NULL;
    }

  location_t loc = EXPR_LOCATION (t);
  if (verify_location (blocks, loc))
    return t;

  return NULL;
}

/* Called via walk_gimple_op.  Verify locations of expressions.  */

static tree
verify_expr_location (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  return verify_expr_location_1 (tp, walk_subtrees, wi->info);
}

/* Insert all subblocks of BLOCK into BLOCKS and recurse.  */

static void
collect_subblocks (hash_set<tree> *blocks, tree block)
{
  tree t;
  for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
    {
      blocks->add (t);
      collect_subblocks (blocks, t);
    }
}

/* Disable warnings about missing quoting in GCC diagnostics for
   the verification errors.  Their format strings don't follow
   GCC diagnostic conventions and trigger an ICE in the end.  */
#if __GNUC__ >= 10
# pragma GCC diagnostic push
# pragma GCC diagnostic ignored "-Wformat-diag"
#endif

/* Verify the GIMPLE statements in the CFG of FN.  */

DEBUG_FUNCTION void
verify_gimple_in_cfg (struct function *fn, bool verify_nothrow)
{
  basic_block bb;
  bool err = false;

  timevar_push (TV_TREE_STMT_VERIFY);
  hash_set<void *> visited;
  hash_set<gimple *> visited_throwing_stmts;

  /* Collect all BLOCKs referenced by the BLOCK tree of FN.  */
  hash_set<tree> blocks;
  if (DECL_INITIAL (fn->decl))
    {
      blocks.add (DECL_INITIAL (fn->decl));
      collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
    }

  FOR_EACH_BB_FN (bb, fn)
    {
      gimple_stmt_iterator gsi;
      edge_iterator ei;
      edge e;

      for (gphi_iterator gpi = gsi_start_phis (bb);
	   !gsi_end_p (gpi);
	   gsi_next (&gpi))
	{
	  gphi *phi = gpi.phi ();
	  bool err2 = false;
	  unsigned i;

	  if (gimple_bb (phi) != bb)
	    {
	      error ("gimple_bb (phi) is set to a wrong basic block");
	      err2 = true;
	    }

	  err2 |= verify_gimple_phi (phi);

	  /* Only PHI arguments have locations.  */
	  if (gimple_location (phi) != UNKNOWN_LOCATION)
	    {
	      error ("PHI node with location");
	      err2 = true;
	    }

	  for (i = 0; i < gimple_phi_num_args (phi); i++)
	    {
	      tree arg = gimple_phi_arg_def (phi, i);
	      tree addr = walk_tree (&arg, verify_node_sharing_1,
				     &visited, NULL);
	      if (addr)
		{
		  error ("incorrect sharing of tree nodes");
		  debug_generic_expr (addr);
		  err2 |= true;
		}
	      location_t loc = gimple_phi_arg_location (phi, i);
	      if (virtual_operand_p (gimple_phi_result (phi))
		  && loc != UNKNOWN_LOCATION)
		{
		  error ("virtual PHI with argument locations");
		  err2 = true;
		}
	      addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
	      if (addr)
		{
		  debug_generic_expr (addr);
		  err2 = true;
		}
	      err2 |= verify_location (&blocks, loc);
	    }

	  if (err2)
	    debug_gimple_stmt (phi);
	  err |= err2;
	}

      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  bool err2 = false;
	  struct walk_stmt_info wi;
	  tree addr;
	  int lp_nr;

	  if (gimple_bb (stmt) != bb)
	    {
	      error ("gimple_bb (stmt) is set to a wrong basic block");
	      err2 = true;
	    }

	  err2 |= verify_gimple_stmt (stmt);
	  err2 |= verify_location (&blocks, gimple_location (stmt));

	  memset (&wi, 0, sizeof (wi));
	  wi.info = (void *) &visited;
	  addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
	  if (addr)
	    {
	      error ("incorrect sharing of tree nodes");
	      debug_generic_expr (addr);
	      err2 |= true;
	    }

	  memset (&wi, 0, sizeof (wi));
	  wi.info = (void *) &blocks;
	  addr = walk_gimple_op (stmt, verify_expr_location, &wi);
	  if (addr)
	    {
	      debug_generic_expr (addr);
	      err2 |= true;
	    }

	  /* If the statement is marked as part of an EH region, then it is
	     expected that the statement could throw.  Verify that when we
	     have optimizations that simplify statements such that we prove
	     that they cannot throw, that we update other data structures
	     to match.  */
	  lp_nr = lookup_stmt_eh_lp (stmt);
	  if (lp_nr != 0)
	    visited_throwing_stmts.add (stmt);
	  if (lp_nr > 0)
	    {
	      if (!stmt_could_throw_p (cfun, stmt))
		{
		  if (verify_nothrow)
		    {
		      error ("statement marked for throw, but doesn%'t");
		      err2 |= true;
		    }
		}
	      else if (!gsi_one_before_end_p (gsi))
		{
		  error ("statement marked for throw in middle of block");
		  err2 |= true;
		}
	    }

	  if (err2)
	    debug_gimple_stmt (stmt);
	  err |= err2;
	}

      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->goto_locus != UNKNOWN_LOCATION)
	  err |= verify_location (&blocks, e->goto_locus);
    }

  hash_map<gimple *, int> *eh_table = get_eh_throw_stmt_table (cfun);
  eh_error_found = false;
  if (eh_table)
    eh_table->traverse<hash_set<gimple *> *, verify_eh_throw_stmt_node>
      (&visited_throwing_stmts);

  if (err || eh_error_found)
    internal_error ("verify_gimple failed");

  verify_histograms ();
  timevar_pop (TV_TREE_STMT_VERIFY);
}


/* Verifies that the flow information is OK.  */

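/* E.g. labels must come first in their block and map back to it,
   control statements may appear only at the end of a block, and the
   outgoing edge flags must match the terminator: a GIMPLE_COND needs
   exactly one true and one false edge, a return needs a single edge to
   the exit block, a switch needs one edge per reachable label, and so
   on.  */
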
static int
gimple_verify_flow_info (void)
{
  int err = 0;
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple *stmt;
  edge e;
  edge_iterator ei;

  if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
      || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
    {
      error ("ENTRY_BLOCK has IL associated with it");
      err = 1;
    }

  if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
      || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
    {
      error ("EXIT_BLOCK has IL associated with it");
      err = 1;
    }

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    if (e->flags & EDGE_FALLTHRU)
      {
	error ("fallthru to exit from bb %d", e->src->index);
	err = 1;
      }

  FOR_EACH_BB_FN (bb, cfun)
    {
      bool found_ctrl_stmt = false;

      stmt = NULL;

      /* Skip labels on the start of basic block.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  tree label;
	  gimple *prev_stmt = stmt;

	  stmt = gsi_stmt (gsi);

	  if (gimple_code (stmt) != GIMPLE_LABEL)
	    break;

	  label = gimple_label_label (as_a <glabel *> (stmt));
	  if (prev_stmt && DECL_NONLOCAL (label))
	    {
	      error ("nonlocal label %qD is not first in a sequence "
		     "of labels in bb %d", label, bb->index);
	      err = 1;
	    }

	  if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
	    {
	      error ("EH landing pad label %qD is not first in a sequence "
		     "of labels in bb %d", label, bb->index);
	      err = 1;
	    }

	  if (label_to_block (cfun, label) != bb)
	    {
	      error ("label %qD to block does not match in bb %d",
		     label, bb->index);
	      err = 1;
	    }

	  if (decl_function_context (label) != current_function_decl)
	    {
	      error ("label %qD has incorrect context in bb %d",
		     label, bb->index);
	      err = 1;
	    }
	}

      /* Verify that body of basic block BB is free of control flow.  */
      for (; !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);

	  if (found_ctrl_stmt)
	    {
	      error ("control flow in the middle of basic block %d",
		     bb->index);
	      err = 1;
	    }

	  if (stmt_ends_bb_p (stmt))
	    found_ctrl_stmt = true;

	  if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
	    {
	      error ("label %qD in the middle of basic block %d",
		     gimple_label_label (label_stmt), bb->index);
	      err = 1;
	    }
	}

      gsi = gsi_last_nondebug_bb (bb);
      if (gsi_end_p (gsi))
	continue;

      stmt = gsi_stmt (gsi);

      if (gimple_code (stmt) == GIMPLE_LABEL)
	continue;

      err |= verify_eh_edges (stmt);

      if (is_ctrl_stmt (stmt))
	{
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    if (e->flags & EDGE_FALLTHRU)
	      {
		error ("fallthru edge after a control statement in bb %d",
		       bb->index);
		err = 1;
	      }
	}

      if (gimple_code (stmt) != GIMPLE_COND)
	{
	  /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
	     after anything else but if statement.  */
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
	      {
		error ("true/false edge after a non-GIMPLE_COND in bb %d",
		       bb->index);
		err = 1;
	      }
	}

      switch (gimple_code (stmt))
	{
	case GIMPLE_COND:
	  {
	    edge true_edge;
	    edge false_edge;

	    extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

	    if (!true_edge
		|| !false_edge
		|| !(true_edge->flags & EDGE_TRUE_VALUE)
		|| !(false_edge->flags & EDGE_FALSE_VALUE)
		|| (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
		|| (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
		|| EDGE_COUNT (bb->succs) >= 3)
	      {
		error ("wrong outgoing edge flags at end of bb %d",
		       bb->index);
		err = 1;
	      }
	  }
	  break;

	case GIMPLE_GOTO:
	  if (simple_goto_p (stmt))
	    {
	      error ("explicit goto at end of bb %d", bb->index);
	      err = 1;
	    }
	  else
	    {
	      /* FIXME.  We should double check that the labels in the
		 destination blocks have their address taken.  */
	      FOR_EACH_EDGE (e, ei, bb->succs)
		if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
				 | EDGE_FALSE_VALUE))
		    || !(e->flags & EDGE_ABNORMAL))
		  {
		    error ("wrong outgoing edge flags at end of bb %d",
			   bb->index);
		    err = 1;
		  }
	    }
	  break;

	case GIMPLE_CALL:
	  if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
	    break;
	  /* fallthru */
	case GIMPLE_RETURN:
	  if (!single_succ_p (bb)
	      || (single_succ_edge (bb)->flags
		  & (EDGE_FALLTHRU | EDGE_ABNORMAL
		     | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
	    {
	      error ("wrong outgoing edge flags at end of bb %d", bb->index);
	      err = 1;
	    }
	  if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
	    {
	      error ("return edge does not point to exit in bb %d",
		     bb->index);
	      err = 1;
	    }
	  break;

	case GIMPLE_SWITCH:
	  {
	    gswitch *switch_stmt = as_a <gswitch *> (stmt);
	    tree prev;
	    edge e;
	    size_t i, n;

	    n = gimple_switch_num_labels (switch_stmt);

	    /* Mark all the destination basic blocks.  */
	    for (i = 0; i < n; ++i)
	      {
		basic_block label_bb = gimple_switch_label_bb (cfun, switch_stmt, i);
		gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
		label_bb->aux = (void *)1;
	      }

	    /* Verify that the case labels are sorted.  */
	    prev = gimple_switch_label (switch_stmt, 0);
	    for (i = 1; i < n; ++i)
	      {
		tree c = gimple_switch_label (switch_stmt, i);
		if (!CASE_LOW (c))
		  {
		    error ("found default case not at the start of "
			   "case vector");
		    err = 1;
		    continue;
		  }
		if (CASE_LOW (prev)
		    && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
		  {
		    error ("case labels not sorted: ");
		    print_generic_expr (stderr, prev);
		    fprintf (stderr, " is greater than ");
		    print_generic_expr (stderr, c);
		    fprintf (stderr, " but comes before it.\n");
		    err = 1;
		  }
		prev = c;
	      }
	    /* VRP will remove the default case if it can prove it will
	       never be executed.  So do not verify there always exists
	       a default case here.  */

	    FOR_EACH_EDGE (e, ei, bb->succs)
	      {
		if (!e->dest->aux)
		  {
		    error ("extra outgoing edge %d->%d",
			   bb->index, e->dest->index);
		    err = 1;
		  }

		e->dest->aux = (void *)2;
		if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
				 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
		  {
		    error ("wrong outgoing edge flags at end of bb %d",
			   bb->index);
		    err = 1;
		  }
	      }

	    /* Check that we have all of them.  */
	    for (i = 0; i < n; ++i)
	      {
		basic_block label_bb = gimple_switch_label_bb (cfun,
							       switch_stmt, i);

		if (label_bb->aux != (void *)2)
		  {
		    error ("missing edge %i->%i", bb->index, label_bb->index);
		    err = 1;
		  }
	      }

	    FOR_EACH_EDGE (e, ei, bb->succs)
	      e->dest->aux = (void *)0;

	    break;
	  }

	case GIMPLE_EH_DISPATCH:
	  err |= verify_eh_dispatch_edge (as_a <geh_dispatch *> (stmt));
	  break;

	default:
	  break;
	}
    }

  if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
    verify_dominators (CDI_DOMINATORS);

  return err;
}

#if __GNUC__ >= 10
# pragma GCC diagnostic pop
#endif

/* Updates phi nodes after creating a forwarder block joined
   by edge FALLTHRU.  */

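/* E.g. when predecessors of BB are redirected to a new forwarder block
   DUMMY, a node

     # x_3 = PHI <x_1(2), x_2(4)>

   stays in DUMMY under a fresh result name, and BB receives a new PHI
   for x_3 whose argument on FALLTHRU is that renamed result; arguments
   pending on BB's other incoming edges are flushed afterwards.
   (Illustrative dump syntax.)  */
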
static void
gimple_make_forwarder_block (edge fallthru)
{
  edge e;
  edge_iterator ei;
  basic_block dummy, bb;
  tree var;
  gphi_iterator gsi;
  bool forward_location_p;

  dummy = fallthru->src;
  bb = fallthru->dest;

  if (single_pred_p (bb))
    return;

  /* We can forward location info if we have only one predecessor.  */
  forward_location_p = single_pred_p (dummy);

  /* If we redirected a branch we must create new PHI nodes at the
     start of BB.  */
  for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gphi *phi, *new_phi;

      phi = gsi.phi ();
      var = gimple_phi_result (phi);
      new_phi = create_phi_node (var, bb);
      gimple_phi_set_result (phi, copy_ssa_name (var, phi));
      add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
		   forward_location_p
		   ? gimple_phi_arg_location (phi, 0) : UNKNOWN_LOCATION);
    }

  /* Add the arguments we have stored on edges.  */
  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      if (e == fallthru)
	continue;

      flush_pending_stmts (e);
    }
}


/* Return a non-special label in the head of basic block BLOCK.
   Create one if it doesn't exist.  */

tree
gimple_block_label (basic_block bb)
{
  gimple_stmt_iterator i, s = gsi_start_bb (bb);
  bool first = true;
  tree label;
  glabel *stmt;

  for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
    {
      stmt = dyn_cast <glabel *> (gsi_stmt (i));
      if (!stmt)
	break;
      label = gimple_label_label (stmt);
      if (!DECL_NONLOCAL (label))
	{
	  if (!first)
	    gsi_move_before (&i, &s);
	  return label;
	}
    }

  label = create_artificial_label (UNKNOWN_LOCATION);
  stmt = gimple_build_label (label);
  gsi_insert_before (&s, stmt, GSI_NEW_STMT);
  return label;
}


/* Attempt to perform edge redirection by replacing a possibly complex
   jump instruction by a goto or by removing the jump completely.
   This can apply only if all edges now point to the same block.  The
   parameters and return values are equivalent to
   redirect_edge_and_branch.  */

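/* E.g. if both outgoing edges of a block ending in a GIMPLE_COND come
   to point at the same TARGET, the condition decides nothing anymore:
   the statement is removed and E becomes a plain fallthru edge.  */
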
static edge
gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
{
  basic_block src = e->src;
  gimple_stmt_iterator i;
  gimple *stmt;

  /* We can replace or remove a complex jump only when we have exactly
     two edges.  */
  if (EDGE_COUNT (src->succs) != 2
      /* Verify that all targets will be TARGET.  Specifically, the
	 edge that is not E must also go to TARGET.  */
      || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
    return NULL;

  i = gsi_last_bb (src);
  if (gsi_end_p (i))
    return NULL;

  stmt = gsi_stmt (i);

  if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
    {
      gsi_remove (&i, true);
      e = ssa_redirect_edge (e, target);
      e->flags = EDGE_FALLTHRU;
      return e;
    }

  return NULL;
}


/* Redirect E to DEST.  Return NULL on failure.  Otherwise, return the
   edge representing the redirected branch.  */

static edge
gimple_redirect_edge_and_branch (edge e, basic_block dest)
{
  basic_block bb = e->src;
  gimple_stmt_iterator gsi;
  edge ret;
  gimple *stmt;

  if (e->flags & EDGE_ABNORMAL)
    return NULL;

  if (e->dest == dest)
    return NULL;

  if (e->flags & EDGE_EH)
    return redirect_eh_edge (e, dest);

  if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
    {
      ret = gimple_try_redirect_by_replacing_jump (e, dest);
      if (ret)
	return ret;
    }

  gsi = gsi_last_nondebug_bb (bb);
  stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);

  switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
    {
    case GIMPLE_COND:
      /* For COND_EXPR, we only need to redirect the edge.  */
      break;

    case GIMPLE_GOTO:
      /* No non-abnormal edges should lead from a non-simple goto, and
	 simple ones should be represented implicitly.  */
      gcc_unreachable ();

    case GIMPLE_SWITCH:
      {
	gswitch *switch_stmt = as_a <gswitch *> (stmt);
	tree label = gimple_block_label (dest);
	tree cases = get_cases_for_edge (e, switch_stmt);

	/* If we have a list of cases associated with E, then use it
	   as it's a lot faster than walking the entire case vector.  */
	if (cases)
	  {
	    edge e2 = find_edge (e->src, dest);
	    tree last, first;

	    first = cases;
	    while (cases)
	      {
		last = cases;
		CASE_LABEL (cases) = label;
		cases = CASE_CHAIN (cases);
	      }

	    /* If there was already an edge in the CFG, then we need
	       to move all the cases associated with E to E2.  */
	    if (e2)
	      {
		tree cases2 = get_cases_for_edge (e2, switch_stmt);

		CASE_CHAIN (last) = CASE_CHAIN (cases2);
		CASE_CHAIN (cases2) = first;
	      }
	    bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
	  }
	else
	  {
	    size_t i, n = gimple_switch_num_labels (switch_stmt);

	    for (i = 0; i < n; i++)
	      {
		tree elt = gimple_switch_label (switch_stmt, i);
		if (label_to_block (cfun, CASE_LABEL (elt)) == e->dest)
		  CASE_LABEL (elt) = label;
	      }
	  }
      }
      break;

    case GIMPLE_ASM:
      {
	gasm *asm_stmt = as_a <gasm *> (stmt);
	int i, n = gimple_asm_nlabels (asm_stmt);
	tree label = NULL;

	for (i = 0; i < n; ++i)
	  {
	    tree cons = gimple_asm_label_op (asm_stmt, i);
	    if (label_to_block (cfun, TREE_VALUE (cons)) == e->dest)
	      {
		if (!label)
		  label = gimple_block_label (dest);
		TREE_VALUE (cons) = label;
	      }
	  }

	/* If we didn't find any label matching the former edge in the
	   asm labels, we must be redirecting the fallthrough
	   edge.  */
	gcc_assert (label || (e->flags & EDGE_FALLTHRU));
      }
      break;

    case GIMPLE_RETURN:
      gsi_remove (&gsi, true);
      e->flags |= EDGE_FALLTHRU;
      break;

    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_CONTINUE:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_FOR:
      /* The edges from OMP constructs can be simply redirected.  */
      break;

    case GIMPLE_EH_DISPATCH:
      if (!(e->flags & EDGE_FALLTHRU))
	redirect_eh_dispatch_edge (as_a <geh_dispatch *> (stmt), e, dest);
      break;

    case GIMPLE_TRANSACTION:
      if (e->flags & EDGE_TM_ABORT)
	gimple_transaction_set_label_over (as_a <gtransaction *> (stmt),
					   gimple_block_label (dest));
      else if (e->flags & EDGE_TM_UNINSTRUMENTED)
	gimple_transaction_set_label_uninst (as_a <gtransaction *> (stmt),
					     gimple_block_label (dest));
      else
	gimple_transaction_set_label_norm (as_a <gtransaction *> (stmt),
					   gimple_block_label (dest));
      break;

    default:
      /* Otherwise it must be a fallthru edge, and we don't need to
	 do anything besides redirecting it.  */
      gcc_assert (e->flags & EDGE_FALLTHRU);
      break;
    }

  /* Update/insert PHI nodes as necessary.  */

  /* Now update the edges in the CFG.  */
  e = ssa_redirect_edge (e, dest);

  return e;
}

/* Returns true if it is possible to remove edge E by redirecting
   it to the destination of the other edge from E->src.  */

static bool
gimple_can_remove_branch_p (const_edge e)
{
  if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
    return false;

  return true;
}

/* Simple wrapper, as we can always redirect fallthru edges.  */

static basic_block
gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
{
  e = gimple_redirect_edge_and_branch (e, dest);
  gcc_assert (e);

  return NULL;
}


/* Splits basic block BB after statement STMT (but at least after the
   labels).  If STMT is NULL, BB is split just after the labels.  */

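/* E.g. splitting a block containing

     x_1 = f ();
     y_2 = x_1 + 1;

   after the call leaves the label(s) and the call in BB and moves
   y_2 = x_1 + 1 into a fresh block that inherits all of BB's outgoing
   edges.  (Illustrative GIMPLE, not from any testcase.)  */
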
6190 static basic_block
6191 gimple_split_block (basic_block bb, void *stmt)
6193 gimple_stmt_iterator gsi;
6194 gimple_stmt_iterator gsi_tgt;
6195 gimple_seq list;
6196 basic_block new_bb;
6197 edge e;
6198 edge_iterator ei;
6200 new_bb = create_empty_bb (bb);
6202 /* Redirect the outgoing edges. */
6203 new_bb->succs = bb->succs;
6204 bb->succs = NULL;
6205 FOR_EACH_EDGE (e, ei, new_bb->succs)
6206 e->src = new_bb;
6208 /* Get a stmt iterator pointing to the first stmt to move. */
6209 if (!stmt || gimple_code ((gimple *) stmt) == GIMPLE_LABEL)
6210 gsi = gsi_after_labels (bb);
6211 else
6213 gsi = gsi_for_stmt ((gimple *) stmt);
6214 gsi_next (&gsi);
6217 /* Move everything from GSI to the new basic block. */
6218 if (gsi_end_p (gsi))
6219 return new_bb;
6221 /* Split the statement list - avoid re-creating containers, as this
6222 brings ugly quadratic memory consumption in the inliner.
6223 (We are still quadratic since we need to update stmt BB pointers,
6224 sadly.) */
6225 gsi_split_seq_before (&gsi, &list);
6226 set_bb_seq (new_bb, list);
6227 for (gsi_tgt = gsi_start (list);
6228 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
6229 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
6231 return new_bb;
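/* A minimal usage sketch (hypothetical statements): if BB holds

     L1:
     a_1 = f ();
     b_2 = g (a_1);

   then splitting BB after the "a_1 = f ()" statement keeps the label
   and "a_1 = f ()" in BB, moves "b_2 = g (a_1)" into the new block
   returned by the hook, and the original outgoing edges now start from
   that new block.  */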
6235 /* Moves basic block BB after block AFTER. */
6237 static bool
6238 gimple_move_block_after (basic_block bb, basic_block after)
6240 if (bb->prev_bb == after)
6241 return true;
6243 unlink_block (bb);
6244 link_block (bb, after);
6246 return true;
6250 /* Return TRUE if block BB has no executable statements, otherwise return
6251 FALSE. */
6253 static bool
6254 gimple_empty_block_p (basic_block bb)
6256 /* BB must have no executable statements. */
6257 gimple_stmt_iterator gsi = gsi_after_labels (bb);
6258 if (phi_nodes (bb))
6259 return false;
6260 while (!gsi_end_p (gsi))
6262 gimple *stmt = gsi_stmt (gsi);
6263 if (is_gimple_debug (stmt))
6265 else if (gimple_code (stmt) == GIMPLE_NOP
6266 || gimple_code (stmt) == GIMPLE_PREDICT)
6268 else
6269 return false;
6270 gsi_next (&gsi);
6272 return true;
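/* For instance (schematic GIMPLE, assumed shapes), a block containing
   only

     L2:
     # DEBUG x => x_1
     GIMPLE_PREDICT <...>

   counts as empty here, while the presence of any PHI node or any real
   statement makes the predicate return false.  */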
6276 /* Split a basic block if it ends with a conditional branch and if the
6277 other part of the block is not empty. */
6279 static basic_block
6280 gimple_split_block_before_cond_jump (basic_block bb)
6282 gimple *last, *split_point;
6283 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
6284 if (gsi_end_p (gsi))
6285 return NULL;
6286 last = gsi_stmt (gsi);
6287 if (gimple_code (last) != GIMPLE_COND
6288 && gimple_code (last) != GIMPLE_SWITCH)
6289 return NULL;
6290 gsi_prev (&gsi);
6291 split_point = gsi_stmt (gsi);
6292 return split_block (bb, split_point)->dest;
6296 /* Return true if basic_block can be duplicated. */
6298 static bool
6299 gimple_can_duplicate_bb_p (const_basic_block bb)
6301 gimple *last = last_stmt (CONST_CAST_BB (bb));
6303 /* Do checks that can only fail for the last stmt, to minimize the work in the
6304 stmt loop. */
6305 if (last) {
6306 /* A transaction is a single entry multiple exit region. It
6307 must be duplicated in its entirety or not at all. */
6308 if (gimple_code (last) == GIMPLE_TRANSACTION)
6309 return false;
6311 /* An IFN_UNIQUE call must be duplicated as part of its group,
6312 or not at all. */
6313 if (is_gimple_call (last)
6314 && gimple_call_internal_p (last)
6315 && gimple_call_internal_unique_p (last))
6316 return false;
6319 for (gimple_stmt_iterator gsi = gsi_start_bb (CONST_CAST_BB (bb));
6320 !gsi_end_p (gsi); gsi_next (&gsi))
6322 gimple *g = gsi_stmt (gsi);
6324 /* An IFN_GOMP_SIMT_ENTER_ALLOC/IFN_GOMP_SIMT_EXIT call must be
6325 duplicated as part of its group, or not at all.
6326 The IFN_GOMP_SIMT_VOTE_ANY and IFN_GOMP_SIMT_XCHG_* are part of such a
6327 group, so the same holds there. */
6328 if (is_gimple_call (g)
6329 && (gimple_call_internal_p (g, IFN_GOMP_SIMT_ENTER_ALLOC)
6330 || gimple_call_internal_p (g, IFN_GOMP_SIMT_EXIT)
6331 || gimple_call_internal_p (g, IFN_GOMP_SIMT_VOTE_ANY)
6332 || gimple_call_internal_p (g, IFN_GOMP_SIMT_XCHG_BFLY)
6333 || gimple_call_internal_p (g, IFN_GOMP_SIMT_XCHG_IDX)))
6334 return false;
6337 return true;
6340 /* Create a duplicate of the basic block BB. NOTE: This does not
6341 preserve SSA form. */
6343 static basic_block
6344 gimple_duplicate_bb (basic_block bb, copy_bb_data *id)
6346 basic_block new_bb;
6347 gimple_stmt_iterator gsi_tgt;
6349 new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
6351 /* Copy the PHI nodes. We ignore PHI node arguments here because
6352 the incoming edges have not been setup yet. */
6353 for (gphi_iterator gpi = gsi_start_phis (bb);
6354 !gsi_end_p (gpi);
6355 gsi_next (&gpi))
6357 gphi *phi, *copy;
6358 phi = gpi.phi ();
6359 copy = create_phi_node (NULL_TREE, new_bb);
6360 create_new_def_for (gimple_phi_result (phi), copy,
6361 gimple_phi_result_ptr (copy));
6362 gimple_set_uid (copy, gimple_uid (phi));
6365 gsi_tgt = gsi_start_bb (new_bb);
6366 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
6367 !gsi_end_p (gsi);
6368 gsi_next (&gsi))
6370 def_operand_p def_p;
6371 ssa_op_iter op_iter;
6372 tree lhs;
6373 gimple *stmt, *copy;
6375 stmt = gsi_stmt (gsi);
6376 if (gimple_code (stmt) == GIMPLE_LABEL)
6377 continue;
6379 /* Don't duplicate label debug stmts. */
6380 if (gimple_debug_bind_p (stmt)
6381 && TREE_CODE (gimple_debug_bind_get_var (stmt))
6382 == LABEL_DECL)
6383 continue;
6385 /* Create a new copy of STMT and duplicate STMT's virtual
6386 operands. */
6387 copy = gimple_copy (stmt);
6388 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
6390 maybe_duplicate_eh_stmt (copy, stmt);
6391 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
6393 /* When copying around a stmt writing into a local non-user
6394 aggregate, make sure it won't share a stack slot with other
6395 vars. */
6396 lhs = gimple_get_lhs (stmt);
6397 if (lhs && TREE_CODE (lhs) != SSA_NAME)
6399 tree base = get_base_address (lhs);
6400 if (base
6401 && (VAR_P (base) || TREE_CODE (base) == RESULT_DECL)
6402 && DECL_IGNORED_P (base)
6403 && !TREE_STATIC (base)
6404 && !DECL_EXTERNAL (base)
6405 && (!VAR_P (base) || !DECL_HAS_VALUE_EXPR_P (base)))
6406 DECL_NONSHAREABLE (base) = 1;
6409 /* If requested, remap the dependence info of cliques brought in
6410 via inlining. */
6411 if (id)
6412 for (unsigned i = 0; i < gimple_num_ops (copy); ++i)
6414 tree op = gimple_op (copy, i);
6415 if (!op)
6416 continue;
6417 if (TREE_CODE (op) == ADDR_EXPR
6418 || TREE_CODE (op) == WITH_SIZE_EXPR)
6419 op = TREE_OPERAND (op, 0);
6420 while (handled_component_p (op))
6421 op = TREE_OPERAND (op, 0);
6422 if ((TREE_CODE (op) == MEM_REF
6423 || TREE_CODE (op) == TARGET_MEM_REF)
6424 && MR_DEPENDENCE_CLIQUE (op) > 1
6425 && MR_DEPENDENCE_CLIQUE (op) != bb->loop_father->owned_clique)
6427 if (!id->dependence_map)
6428 id->dependence_map = new hash_map<dependence_hash,
6429 unsigned short>;
6430 bool existed;
6431 unsigned short &newc = id->dependence_map->get_or_insert
6432 (MR_DEPENDENCE_CLIQUE (op), &existed);
6433 if (!existed)
6435 gcc_assert (MR_DEPENDENCE_CLIQUE (op) <= cfun->last_clique);
6436 newc = ++cfun->last_clique;
6438 MR_DEPENDENCE_CLIQUE (op) = newc;
6442 /* Create new names for all the definitions created by COPY and
6443 add replacement mappings for each new name. */
6444 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
6445 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
6448 return new_bb;
6451 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
6453 static void
6454 add_phi_args_after_copy_edge (edge e_copy)
6456 basic_block bb, bb_copy = e_copy->src, dest;
6457 edge e;
6458 edge_iterator ei;
6459 gphi *phi, *phi_copy;
6460 tree def;
6461 gphi_iterator psi, psi_copy;
6463 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
6464 return;
6466 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
6468 if (e_copy->dest->flags & BB_DUPLICATED)
6469 dest = get_bb_original (e_copy->dest);
6470 else
6471 dest = e_copy->dest;
6473 e = find_edge (bb, dest);
6474 if (!e)
6476 /* During loop unrolling the target of the latch edge is copied.
6477 In this case we are not looking for the edge to DEST, but for
6478 the edge to the duplicated block whose original was DEST. */
6479 FOR_EACH_EDGE (e, ei, bb->succs)
6481 if ((e->dest->flags & BB_DUPLICATED)
6482 && get_bb_original (e->dest) == dest)
6483 break;
6486 gcc_assert (e != NULL);
6489 for (psi = gsi_start_phis (e->dest),
6490 psi_copy = gsi_start_phis (e_copy->dest);
6491 !gsi_end_p (psi);
6492 gsi_next (&psi), gsi_next (&psi_copy))
6494 phi = psi.phi ();
6495 phi_copy = psi_copy.phi ();
6496 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
6497 add_phi_arg (phi_copy, def, e_copy,
6498 gimple_phi_arg_location_from_edge (phi, e));
6503 /* Basic block BB_COPY was created by code duplication. Add phi node
6504 arguments for edges going out of BB_COPY. The blocks that were
6505 duplicated have BB_DUPLICATED set. */
6507 void
6508 add_phi_args_after_copy_bb (basic_block bb_copy)
6510 edge e_copy;
6511 edge_iterator ei;
6513 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
6515 add_phi_args_after_copy_edge (e_copy);
6519 /* Blocks in REGION_COPY array of length N_REGION were created by
6520 duplication of basic blocks. Add phi node arguments for edges
6521 going from these blocks. If E_COPY is not NULL, also add
6522 phi node arguments for its destination. */
6524 void
6525 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
6526 edge e_copy)
6528 unsigned i;
6530 for (i = 0; i < n_region; i++)
6531 region_copy[i]->flags |= BB_DUPLICATED;
6533 for (i = 0; i < n_region; i++)
6534 add_phi_args_after_copy_bb (region_copy[i]);
6535 if (e_copy)
6536 add_phi_args_after_copy_edge (e_copy);
6538 for (i = 0; i < n_region; i++)
6539 region_copy[i]->flags &= ~BB_DUPLICATED;
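/* Sketch of the calling protocol assumed above: once copy_bbs has
   filled REGION_COPY, the temporary BB_DUPLICATED flags let
   add_phi_args_after_copy_edge map each copy back to its original via
   get_bb_original and reuse the PHI argument from the original edge:

     add_phi_args_after_copy (region_copy, n_region, NULL);

   The flags are set only for the duration of the call.  */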
6542 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
6543 important exit edge EXIT. By important we mean that no SSA name defined
6544 inside region is live over the other exit edges of the region. All entry
6545 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
6546 to the duplicate of the region. Dominance and loop information is
6547 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
6548 UPDATE_DOMINANCE is false then we assume that the caller will update the
6549 dominance information after calling this function. The new basic
6550 blocks are stored to REGION_COPY in the same order as their originals appear in REGION,
6551 provided that REGION_COPY is not NULL.
6552 The function returns false if it is unable to copy the region,
6553 true otherwise. */
6555 bool
6556 gimple_duplicate_sese_region (edge entry, edge exit,
6557 basic_block *region, unsigned n_region,
6558 basic_block *region_copy,
6559 bool update_dominance)
6561 unsigned i;
6562 bool free_region_copy = false, copying_header = false;
6563 class loop *loop = entry->dest->loop_father;
6564 edge exit_copy;
6565 edge redirected;
6566 profile_count total_count = profile_count::uninitialized ();
6567 profile_count entry_count = profile_count::uninitialized ();
6569 if (!can_copy_bbs_p (region, n_region))
6570 return false;
6572 /* Some sanity checking. Note that we do not check for all possible
6573 misuses of the functions. I.e., if you ask to copy something weird,
6574 it will work, but the state of structures probably will not be
6575 correct. */
6576 for (i = 0; i < n_region; i++)
6578 /* We do not handle subloops, i.e. all the blocks must belong to the
6579 same loop. */
6580 if (region[i]->loop_father != loop)
6581 return false;
6583 if (region[i] != entry->dest
6584 && region[i] == loop->header)
6585 return false;
6588 /* In case the function is used for loop header copying (which is the primary
6589 use), ensure that EXIT and its copy will be the new latch and entry edges.
6590 if (loop->header == entry->dest)
6592 copying_header = true;
6594 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
6595 return false;
6597 for (i = 0; i < n_region; i++)
6598 if (region[i] != exit->src
6599 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
6600 return false;
6603 initialize_original_copy_tables ();
6605 if (copying_header)
6606 set_loop_copy (loop, loop_outer (loop));
6607 else
6608 set_loop_copy (loop, loop);
6610 if (!region_copy)
6612 region_copy = XNEWVEC (basic_block, n_region);
6613 free_region_copy = true;
6616 /* Record blocks outside the region that are dominated by something
6617 inside. */
6618 auto_vec<basic_block> doms;
6619 if (update_dominance)
6621 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6624 if (entry->dest->count.initialized_p ())
6626 total_count = entry->dest->count;
6627 entry_count = entry->count ();
6628 /* Fix up corner cases, to avoid division by zero or creation of negative
6629 frequencies. */
6630 if (entry_count > total_count)
6631 entry_count = total_count;
6634 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
6635 split_edge_bb_loc (entry), update_dominance);
6636 if (total_count.initialized_p () && entry_count.initialized_p ())
6638 scale_bbs_frequencies_profile_count (region, n_region,
6639 total_count - entry_count,
6640 total_count);
6641 scale_bbs_frequencies_profile_count (region_copy, n_region, entry_count,
6642 total_count);
6645 if (copying_header)
6647 loop->header = exit->dest;
6648 loop->latch = exit->src;
6651 /* Redirect the entry and add the phi node arguments. */
6652 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
6653 gcc_assert (redirected != NULL);
6654 flush_pending_stmts (entry);
6656 /* Concerning updating of dominators: We must recount dominators
6657 for entry block and its copy. Anything that is outside of the
6658 region, but was dominated by something inside needs recounting as
6659 well. */
6660 if (update_dominance)
6662 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
6663 doms.safe_push (get_bb_original (entry->dest));
6664 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6667 /* Add the other PHI node arguments. */
6668 add_phi_args_after_copy (region_copy, n_region, NULL);
6670 if (free_region_copy)
6671 free (region_copy);
6673 free_original_copy_tables ();
6674 return true;
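/* As a schematic source-level illustration of the primary use (loop
   header copying), duplicating the header region of

     while (cond)
       body;

   across ENTRY yields the guarded, rotated form

     if (cond)
       do
         body;
       while (cond);

   with the copied header providing the entry test and EXIT and its
   copy becoming the new latch and entry edges.  */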
6677 /* Checks if BB is part of the region defined by N_REGION BBS. */
6678 static bool
6679 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
6681 unsigned int n;
6683 for (n = 0; n < n_region; n++)
6685 if (bb == bbs[n])
6686 return true;
6688 return false;
6691 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
6692 are stored to REGION_COPY in the same order in which they appear
6693 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
6694 the region, EXIT an exit from it. The condition guarding EXIT
6695 is moved to ENTRY. Returns true if duplication succeeds, false
6696 otherwise.
6698 For example,
6700 some_code;
6701 if (cond)
6702 A;
6703 else
6704 B;
6706 is transformed to
6708 if (cond)
6709 {
6710 some_code;
6711 A;
6712 }
6713 else
6714 {
6715 some_code;
6716 B;
6717 }
6718 */
6720 bool
6721 gimple_duplicate_sese_tail (edge entry, edge exit,
6722 basic_block *region, unsigned n_region,
6723 basic_block *region_copy)
6725 unsigned i;
6726 bool free_region_copy = false;
6727 class loop *loop = exit->dest->loop_father;
6728 class loop *orig_loop = entry->dest->loop_father;
6729 basic_block switch_bb, entry_bb, nentry_bb;
6730 profile_count total_count = profile_count::uninitialized (),
6731 exit_count = profile_count::uninitialized ();
6732 edge exits[2], nexits[2], e;
6733 gimple_stmt_iterator gsi;
6734 gimple *cond_stmt;
6735 edge sorig, snew;
6736 basic_block exit_bb;
6737 gphi_iterator psi;
6738 gphi *phi;
6739 tree def;
6740 class loop *target, *aloop, *cloop;
6742 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
6743 exits[0] = exit;
6744 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
6746 if (!can_copy_bbs_p (region, n_region))
6747 return false;
6749 initialize_original_copy_tables ();
6750 set_loop_copy (orig_loop, loop);
6752 target = loop;
6753 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
6755 if (bb_part_of_region_p (aloop->header, region, n_region))
6757 cloop = duplicate_loop (aloop, target);
6758 duplicate_subloops (aloop, cloop);
6762 if (!region_copy)
6764 region_copy = XNEWVEC (basic_block, n_region);
6765 free_region_copy = true;
6768 gcc_assert (!need_ssa_update_p (cfun));
6770 /* Record blocks outside the region that are dominated by something
6771 inside. */
6772 auto_vec<basic_block> doms = get_dominated_by_region (CDI_DOMINATORS, region,
6773 n_region);
6775 total_count = exit->src->count;
6776 exit_count = exit->count ();
6777 /* Fix up corner cases, to avoid division by zero or creation of negative
6778 frequencies. */
6779 if (exit_count > total_count)
6780 exit_count = total_count;
6782 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
6783 split_edge_bb_loc (exit), true);
6784 if (total_count.initialized_p () && exit_count.initialized_p ())
6786 scale_bbs_frequencies_profile_count (region, n_region,
6787 total_count - exit_count,
6788 total_count);
6789 scale_bbs_frequencies_profile_count (region_copy, n_region, exit_count,
6790 total_count);
6793 /* Create the switch block, and put the exit condition to it. */
6794 entry_bb = entry->dest;
6795 nentry_bb = get_bb_copy (entry_bb);
6796 if (!last_stmt (entry->src)
6797 || !stmt_ends_bb_p (last_stmt (entry->src)))
6798 switch_bb = entry->src;
6799 else
6800 switch_bb = split_edge (entry);
6801 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
6803 gsi = gsi_last_bb (switch_bb);
6804 cond_stmt = last_stmt (exit->src);
6805 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
6806 cond_stmt = gimple_copy (cond_stmt);
6808 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
6810 sorig = single_succ_edge (switch_bb);
6811 sorig->flags = exits[1]->flags;
6812 sorig->probability = exits[1]->probability;
6813 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6814 snew->probability = exits[0]->probability;
6817 /* Register the new edge from SWITCH_BB in loop exit lists. */
6818 rescan_loop_exit (snew, true, false);
6820 /* Add the PHI node arguments. */
6821 add_phi_args_after_copy (region_copy, n_region, snew);
6823 /* Get rid of now superfluous conditions and associated edges (and phi node
6824 arguments). */
6825 exit_bb = exit->dest;
6827 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6828 PENDING_STMT (e) = NULL;
6830 /* The latch of ORIG_LOOP was copied, and so was the backedge
6831 to the original header. We redirect this backedge to EXIT_BB. */
6832 for (i = 0; i < n_region; i++)
6833 if (get_bb_original (region_copy[i]) == orig_loop->latch)
6835 gcc_assert (single_succ_edge (region_copy[i]));
6836 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6837 PENDING_STMT (e) = NULL;
6838 for (psi = gsi_start_phis (exit_bb);
6839 !gsi_end_p (psi);
6840 gsi_next (&psi))
6842 phi = psi.phi ();
6843 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6844 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6847 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6848 PENDING_STMT (e) = NULL;
6850 /* Anything that is outside of the region, but was dominated by something
6851 inside needs to update dominance info. */
6852 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6853 /* Update the SSA web. */
6854 update_ssa (TODO_update_ssa);
6856 if (free_region_copy)
6857 free (region_copy);
6859 free_original_copy_tables ();
6860 return true;
6863 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6864 adding blocks when the dominator traversal reaches EXIT. This
6865 function silently assumes that ENTRY strictly dominates EXIT. */
6867 void
6868 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6869 vec<basic_block> *bbs_p)
6871 basic_block son;
6873 for (son = first_dom_son (CDI_DOMINATORS, entry);
6874 son;
6875 son = next_dom_son (CDI_DOMINATORS, son))
6877 bbs_p->safe_push (son);
6878 if (son != exit)
6879 gather_blocks_in_sese_region (son, exit, bbs_p);
6883 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6884 The duplicates are recorded in VARS_MAP. */
6886 static void
6887 replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
6888 tree to_context)
6890 tree t = *tp, new_t;
6891 struct function *f = DECL_STRUCT_FUNCTION (to_context);
6893 if (DECL_CONTEXT (t) == to_context)
6894 return;
6896 bool existed;
6897 tree &loc = vars_map->get_or_insert (t, &existed);
6899 if (!existed)
6901 if (SSA_VAR_P (t))
6903 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6904 add_local_decl (f, new_t);
6906 else
6908 gcc_assert (TREE_CODE (t) == CONST_DECL);
6909 new_t = copy_node (t);
6911 DECL_CONTEXT (new_t) = to_context;
6913 loc = new_t;
6915 else
6916 new_t = loc;
6918 *tp = new_t;
6922 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6923 VARS_MAP maps old ssa names and var_decls to the new ones. */
6925 static tree
6926 replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
6927 tree to_context)
6929 tree new_name;
6931 gcc_assert (!virtual_operand_p (name));
6933 tree *loc = vars_map->get (name);
6935 if (!loc)
6937 tree decl = SSA_NAME_VAR (name);
6938 if (decl)
6940 gcc_assert (!SSA_NAME_IS_DEFAULT_DEF (name));
6941 replace_by_duplicate_decl (&decl, vars_map, to_context);
6942 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6943 decl, SSA_NAME_DEF_STMT (name));
6945 else
6946 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6947 name, SSA_NAME_DEF_STMT (name));
6949 /* Now that we've used the def stmt to define new_name, make sure it
6950 doesn't define name anymore. */
6951 SSA_NAME_DEF_STMT (name) = NULL;
6953 vars_map->put (name, new_name);
6955 else
6956 new_name = *loc;
6958 return new_name;
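/* Example of the mapping (hypothetical names): when "i_3 = i_1 + 1" is
   moved into TO_CONTEXT, the first lookup of i_1 duplicates its
   underlying VAR_DECL "i" into the destination function and mints a
   fresh SSA name there; later occurrences of i_1 hit VARS_MAP and
   reuse that name, so the use-def structure survives the move.  */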
6961 struct move_stmt_d
6963 tree orig_block;
6964 tree new_block;
6965 tree from_context;
6966 tree to_context;
6967 hash_map<tree, tree> *vars_map;
6968 htab_t new_label_map;
6969 hash_map<void *, void *> *eh_map;
6970 bool remap_decls_p;
6973 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6974 contained in *TP if it was previously ORIG_BLOCK, and change the
6975 DECL_CONTEXT of every local variable referenced in *TP. */
6977 static tree
6978 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
6980 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6981 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6982 tree t = *tp;
6984 if (EXPR_P (t))
6986 tree block = TREE_BLOCK (t);
6987 if (block == NULL_TREE)
6989 else if (block == p->orig_block
6990 || p->orig_block == NULL_TREE)
6992 /* tree_node_can_be_shared says we can share invariant
6993 addresses but unshare_expr copies them anyway. Make sure
6994 to unshare before adjusting the block in place - we do not
6995 always see a copy here. */
6996 if (TREE_CODE (t) == ADDR_EXPR
6997 && is_gimple_min_invariant (t))
6998 *tp = t = unshare_expr (t);
6999 TREE_SET_BLOCK (t, p->new_block);
7001 else if (flag_checking)
7003 while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
7004 block = BLOCK_SUPERCONTEXT (block);
7005 gcc_assert (block == p->orig_block);
7008 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
7010 if (TREE_CODE (t) == SSA_NAME)
7011 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
7012 else if (TREE_CODE (t) == PARM_DECL
7013 && gimple_in_ssa_p (cfun))
7014 *tp = *(p->vars_map->get (t));
7015 else if (TREE_CODE (t) == LABEL_DECL)
7017 if (p->new_label_map)
7019 struct tree_map in, *out;
7020 in.base.from = t;
7021 out = (struct tree_map *)
7022 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
7023 if (out)
7024 *tp = t = out->to;
7027 /* For FORCED_LABELs we can end up with references from other
7028 functions if some SESE regions are outlined. It is UB to
7029 jump in between them, but they could be used just for printing
7030 addresses etc. In that case, DECL_CONTEXT on the label should
7031 be the function containing the glabel stmt with that LABEL_DECL,
7032 rather than whichever function a reference to the label was last
7033 seen in. */
7034 if (!FORCED_LABEL (t) && !DECL_NONLOCAL (t))
7035 DECL_CONTEXT (t) = p->to_context;
7037 else if (p->remap_decls_p)
7039 /* Replace T with its duplicate. T should no longer appear in the
7040 parent function, so this looks wasteful; however, it may appear
7041 in referenced_vars, and more importantly, as virtual operands of
7042 statements, and in alias lists of other variables. It would be
7043 quite difficult to expunge it from all those places. ??? It might
7044 suffice to do this for addressable variables. */
7045 if ((VAR_P (t) && !is_global_var (t))
7046 || TREE_CODE (t) == CONST_DECL)
7047 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
7049 *walk_subtrees = 0;
7051 else if (TYPE_P (t))
7052 *walk_subtrees = 0;
7054 return NULL_TREE;
7057 /* Helper for move_stmt_r. Given an EH region number for the source
7058 function, map that to the duplicate EH region number in the dest. */
7060 static int
7061 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
7063 eh_region old_r, new_r;
7065 old_r = get_eh_region_from_number (old_nr);
7066 new_r = static_cast<eh_region> (*p->eh_map->get (old_r));
7068 return new_r->index;
7071 /* Similar, but operate on INTEGER_CSTs. */
7073 static tree
7074 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
7076 int old_nr, new_nr;
7078 old_nr = tree_to_shwi (old_t_nr);
7079 new_nr = move_stmt_eh_region_nr (old_nr, p);
7081 return build_int_cst (integer_type_node, new_nr);
7084 /* Like move_stmt_op, but for gimple statements.
7086 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
7087 contained in the current statement in *GSI_P and change the
7088 DECL_CONTEXT of every local variable referenced in the current
7089 statement. */
7091 static tree
7092 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
7093 struct walk_stmt_info *wi)
7095 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
7096 gimple *stmt = gsi_stmt (*gsi_p);
7097 tree block = gimple_block (stmt);
7099 if (block == p->orig_block
7100 || (p->orig_block == NULL_TREE
7101 && block != NULL_TREE))
7102 gimple_set_block (stmt, p->new_block);
7104 switch (gimple_code (stmt))
7106 case GIMPLE_CALL:
7107 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
7109 tree r, fndecl = gimple_call_fndecl (stmt);
7110 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
7111 switch (DECL_FUNCTION_CODE (fndecl))
7113 case BUILT_IN_EH_COPY_VALUES:
7114 r = gimple_call_arg (stmt, 1);
7115 r = move_stmt_eh_region_tree_nr (r, p);
7116 gimple_call_set_arg (stmt, 1, r);
7117 /* FALLTHRU */
7119 case BUILT_IN_EH_POINTER:
7120 case BUILT_IN_EH_FILTER:
7121 r = gimple_call_arg (stmt, 0);
7122 r = move_stmt_eh_region_tree_nr (r, p);
7123 gimple_call_set_arg (stmt, 0, r);
7124 break;
7126 default:
7127 break;
7130 break;
7132 case GIMPLE_RESX:
7134 gresx *resx_stmt = as_a <gresx *> (stmt);
7135 int r = gimple_resx_region (resx_stmt);
7136 r = move_stmt_eh_region_nr (r, p);
7137 gimple_resx_set_region (resx_stmt, r);
7139 break;
7141 case GIMPLE_EH_DISPATCH:
7143 geh_dispatch *eh_dispatch_stmt = as_a <geh_dispatch *> (stmt);
7144 int r = gimple_eh_dispatch_region (eh_dispatch_stmt);
7145 r = move_stmt_eh_region_nr (r, p);
7146 gimple_eh_dispatch_set_region (eh_dispatch_stmt, r);
7148 break;
7150 case GIMPLE_OMP_RETURN:
7151 case GIMPLE_OMP_CONTINUE:
7152 break;
7154 case GIMPLE_LABEL:
7156 /* For FORCED_LABEL, move_stmt_op doesn't adjust DECL_CONTEXT,
7157 so that such labels can be referenced from other regions.
7158 Make sure to update it when seeing a GIMPLE_LABEL though,
7159 that is the owner of the label. */
7160 walk_gimple_op (stmt, move_stmt_op, wi);
7161 *handled_ops_p = true;
7162 tree label = gimple_label_label (as_a <glabel *> (stmt));
7163 if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
7164 DECL_CONTEXT (label) = p->to_context;
7166 break;
7168 default:
7169 if (is_gimple_omp (stmt))
7171 /* Do not remap variables inside OMP directives. Variables
7172 referenced in clauses and directive header belong to the
7173 parent function and should not be moved into the child
7174 function. */
7175 bool save_remap_decls_p = p->remap_decls_p;
7176 p->remap_decls_p = false;
7177 *handled_ops_p = true;
7179 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
7180 move_stmt_op, wi);
7182 p->remap_decls_p = save_remap_decls_p;
7184 break;
7187 return NULL_TREE;
7190 /* Move basic block BB from function CFUN to function DEST_FN. The
7191 block is moved out of the original linked list and placed after
7192 block AFTER in the new list. Also, the block is removed from the
7193 original array of blocks and placed in DEST_FN's array of blocks.
7194 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
7195 updated to reflect the moved edges.
7197 The local variables are remapped to new instances, VARS_MAP is used
7198 to record the mapping. */
7200 static void
7201 move_block_to_fn (struct function *dest_cfun, basic_block bb,
7202 basic_block after, bool update_edge_count_p,
7203 struct move_stmt_d *d)
7205 struct control_flow_graph *cfg;
7206 edge_iterator ei;
7207 edge e;
7208 gimple_stmt_iterator si;
7209 unsigned old_len;
7211 /* Remove BB from dominance structures. */
7212 delete_from_dominance_info (CDI_DOMINATORS, bb);
7214 /* Move BB from its current loop to the copy in the new function. */
7215 if (current_loops)
7217 class loop *new_loop = (class loop *)bb->loop_father->aux;
7218 if (new_loop)
7219 bb->loop_father = new_loop;
7222 /* Link BB to the new linked list. */
7223 move_block_after (bb, after);
7225 /* Update the edge count in the corresponding flowgraphs. */
7226 if (update_edge_count_p)
7227 FOR_EACH_EDGE (e, ei, bb->succs)
7229 cfun->cfg->x_n_edges--;
7230 dest_cfun->cfg->x_n_edges++;
7233 /* Remove BB from the original basic block array. */
7234 (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
7235 cfun->cfg->x_n_basic_blocks--;
7237 /* Grow DEST_CFUN's basic block array if needed. */
7238 cfg = dest_cfun->cfg;
7239 cfg->x_n_basic_blocks++;
7240 if (bb->index >= cfg->x_last_basic_block)
7241 cfg->x_last_basic_block = bb->index + 1;
7243 old_len = vec_safe_length (cfg->x_basic_block_info);
7244 if ((unsigned) cfg->x_last_basic_block >= old_len)
7245 vec_safe_grow_cleared (cfg->x_basic_block_info,
7246 cfg->x_last_basic_block + 1);
7248 (*cfg->x_basic_block_info)[bb->index] = bb;
7250 /* Remap the variables in phi nodes. */
7251 for (gphi_iterator psi = gsi_start_phis (bb);
7252 !gsi_end_p (psi); )
7254 gphi *phi = psi.phi ();
7255 use_operand_p use;
7256 tree op = PHI_RESULT (phi);
7257 ssa_op_iter oi;
7258 unsigned i;
7260 if (virtual_operand_p (op))
7262 /* Remove the phi nodes for virtual operands (alias analysis will be
7263 run for the new function, anyway). But replace all uses that
7264 might be outside of the region we move. */
7265 use_operand_p use_p;
7266 imm_use_iterator iter;
7267 gimple *use_stmt;
7268 FOR_EACH_IMM_USE_STMT (use_stmt, iter, op)
7269 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
7270 SET_USE (use_p, SSA_NAME_VAR (op));
7271 remove_phi_node (&psi, true);
7272 continue;
7275 SET_PHI_RESULT (phi,
7276 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
7277 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
7279 op = USE_FROM_PTR (use);
7280 if (TREE_CODE (op) == SSA_NAME)
7281 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
7284 for (i = 0; i < EDGE_COUNT (bb->preds); i++)
7286 location_t locus = gimple_phi_arg_location (phi, i);
7287 tree block = LOCATION_BLOCK (locus);
7289 if (locus == UNKNOWN_LOCATION)
7290 continue;
7291 if (d->orig_block == NULL_TREE || block == d->orig_block)
7293 locus = set_block (locus, d->new_block);
7294 gimple_phi_arg_set_location (phi, i, locus);
7298 gsi_next (&psi);
7301 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
7303 gimple *stmt = gsi_stmt (si);
7304 struct walk_stmt_info wi;
7306 memset (&wi, 0, sizeof (wi));
7307 wi.info = d;
7308 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
7310 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
7312 tree label = gimple_label_label (label_stmt);
7313 int uid = LABEL_DECL_UID (label);
7315 gcc_assert (uid > -1);
7317 old_len = vec_safe_length (cfg->x_label_to_block_map);
7318 if (old_len <= (unsigned) uid)
7319 vec_safe_grow_cleared (cfg->x_label_to_block_map, uid + 1);
7321 (*cfg->x_label_to_block_map)[uid] = bb;
7322 (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
7324 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
7326 if (uid >= dest_cfun->cfg->last_label_uid)
7327 dest_cfun->cfg->last_label_uid = uid + 1;
7330 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
7331 remove_stmt_from_eh_lp_fn (cfun, stmt);
7333 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
7334 gimple_remove_stmt_histograms (cfun, stmt);
7336 /* We cannot leave any operands allocated from the operand caches of
7337 the current function. */
7338 free_stmt_operands (cfun, stmt);
7339 push_cfun (dest_cfun);
7340 update_stmt (stmt);
7341 if (is_gimple_call (stmt))
7342 notice_special_calls (as_a <gcall *> (stmt));
7343 pop_cfun ();
7346 FOR_EACH_EDGE (e, ei, bb->succs)
7347 if (e->goto_locus != UNKNOWN_LOCATION)
7349 tree block = LOCATION_BLOCK (e->goto_locus);
7350 if (d->orig_block == NULL_TREE
7351 || block == d->orig_block)
7352 e->goto_locus = set_block (e->goto_locus, d->new_block);
7356 /* Examine the statements in BB (which is in SRC_CFUN); find and return
7357 the outermost EH region. Use REGION as the incoming base EH region.
7358 If there is no single outermost region, return NULL and set *ALL to
7359 true. */
7361 static eh_region
7362 find_outermost_region_in_block (struct function *src_cfun,
7363 basic_block bb, eh_region region,
7364 bool *all)
7366 gimple_stmt_iterator si;
7368 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
7370 gimple *stmt = gsi_stmt (si);
7371 eh_region stmt_region;
7372 int lp_nr;
7374 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
7375 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
7376 if (stmt_region)
7378 if (region == NULL)
7379 region = stmt_region;
7380 else if (stmt_region != region)
7382 region = eh_region_outermost (src_cfun, stmt_region, region);
7383 if (region == NULL)
7385 *all = true;
7386 return NULL;
7392 return region;
7395 static tree
7396 new_label_mapper (tree decl, void *data)
7398 htab_t hash = (htab_t) data;
7399 struct tree_map *m;
7400 void **slot;
7402 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
7404 m = XNEW (struct tree_map);
7405 m->hash = DECL_UID (decl);
7406 m->base.from = decl;
7407 m->to = create_artificial_label (UNKNOWN_LOCATION);
7408 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
7409 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
7410 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
7412 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
7413 gcc_assert (*slot == NULL);
7415 *slot = m;
7417 return m->to;
7420 /* Tree walker to replace the decls used inside value expressions by
7421 duplicates. */
7423 static tree
7424 replace_block_vars_by_duplicates_1 (tree *tp, int *walk_subtrees, void *data)
7426 struct replace_decls_d *rd = (struct replace_decls_d *)data;
7428 switch (TREE_CODE (*tp))
7430 case VAR_DECL:
7431 case PARM_DECL:
7432 case RESULT_DECL:
7433 replace_by_duplicate_decl (tp, rd->vars_map, rd->to_context);
7434 break;
7435 default:
7436 break;
7439 if (IS_TYPE_OR_DECL_P (*tp))
7440 *walk_subtrees = false;
7442 return NULL;
7445 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
7446 subblocks. */
7448 static void
7449 replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
7450 tree to_context)
7452 tree *tp, t;
7454 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
7456 t = *tp;
7457 if (!VAR_P (t) && TREE_CODE (t) != CONST_DECL)
7458 continue;
7459 replace_by_duplicate_decl (&t, vars_map, to_context);
7460 if (t != *tp)
7462 if (VAR_P (*tp) && DECL_HAS_VALUE_EXPR_P (*tp))
7464 tree x = DECL_VALUE_EXPR (*tp);
7465 struct replace_decls_d rd = { vars_map, to_context };
7466 x = unshare_expr (x);
7467 walk_tree (&x, replace_block_vars_by_duplicates_1, &rd, NULL);
7468 SET_DECL_VALUE_EXPR (t, x);
7469 DECL_HAS_VALUE_EXPR_P (t) = 1;
7471 DECL_CHAIN (t) = DECL_CHAIN (*tp);
7472 *tp = t;
7476 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
7477 replace_block_vars_by_duplicates (block, vars_map, to_context);
7480 /* Fix up the loop arrays and numbers after moving LOOP and its subloops
7481 from FN1 to FN2. */
7483 static void
7484 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
7485 class loop *loop)
7487 /* Discard it from the old loop array. */
7488 (*get_loops (fn1))[loop->num] = NULL;
7490 /* Place it in the new loop array, assigning it a new number. */
7491 loop->num = number_of_loops (fn2);
7492 vec_safe_push (loops_for_fn (fn2)->larray, loop);
7494 /* Recurse to children. */
7495 for (loop = loop->inner; loop; loop = loop->next)
7496 fixup_loop_arrays_after_move (fn1, fn2, loop);
7499 /* Verify that the blocks in BBS_P are a single-entry, single-exit region
7500 delimited by ENTRY_BB and EXIT_BB, possibly containing noreturn blocks. */
7502 DEBUG_FUNCTION void
7503 verify_sese (basic_block entry, basic_block exit, vec<basic_block> *bbs_p)
7505 basic_block bb;
7506 edge_iterator ei;
7507 edge e;
7508 bitmap bbs = BITMAP_ALLOC (NULL);
7509 int i;
7511 gcc_assert (entry != NULL);
7512 gcc_assert (entry != exit);
7513 gcc_assert (bbs_p != NULL);
7515 gcc_assert (bbs_p->length () > 0);
7517 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7518 bitmap_set_bit (bbs, bb->index);
7520 gcc_assert (bitmap_bit_p (bbs, entry->index));
7521 gcc_assert (exit == NULL || bitmap_bit_p (bbs, exit->index));
7523 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7525 if (bb == entry)
7527 gcc_assert (single_pred_p (entry));
7528 gcc_assert (!bitmap_bit_p (bbs, single_pred (entry)->index));
7530 else
7531 for (ei = ei_start (bb->preds); !ei_end_p (ei); ei_next (&ei))
7533 e = ei_edge (ei);
7534 gcc_assert (bitmap_bit_p (bbs, e->src->index));
7537 if (bb == exit)
7539 gcc_assert (single_succ_p (exit));
7540 gcc_assert (!bitmap_bit_p (bbs, single_succ (exit)->index));
7542 else
7543 for (ei = ei_start (bb->succs); !ei_end_p (ei); ei_next (&ei))
7545 e = ei_edge (ei);
7546 gcc_assert (bitmap_bit_p (bbs, e->dest->index));
7550 BITMAP_FREE (bbs);
7553 /* If FROM is an SSA_NAME, mark the version in bitmap DATA. */
7555 bool
7556 gather_ssa_name_hash_map_from (tree const &from, tree const &, void *data)
7558 bitmap release_names = (bitmap)data;
7560 if (TREE_CODE (from) != SSA_NAME)
7561 return true;
7563 bitmap_set_bit (release_names, SSA_NAME_VERSION (from));
7564 return true;
7567 /* Return LOOP_DIST_ALIAS call if present in BB. */
7569 static gimple *
7570 find_loop_dist_alias (basic_block bb)
7572 gimple *g = last_stmt (bb);
7573 if (g == NULL || gimple_code (g) != GIMPLE_COND)
7574 return NULL;
7576 gimple_stmt_iterator gsi = gsi_for_stmt (g);
7577 gsi_prev (&gsi);
7578 if (gsi_end_p (gsi))
7579 return NULL;
7581 g = gsi_stmt (gsi);
7582 if (gimple_call_internal_p (g, IFN_LOOP_DIST_ALIAS))
7583 return g;
7584 return NULL;
7587 /* Fold loop internal call G like IFN_LOOP_VECTORIZED/IFN_LOOP_DIST_ALIAS
7588 to VALUE and update any immediate uses of its LHS. */
7590 void
7591 fold_loop_internal_call (gimple *g, tree value)
7593 tree lhs = gimple_call_lhs (g);
7594 use_operand_p use_p;
7595 imm_use_iterator iter;
7596 gimple *use_stmt;
7597 gimple_stmt_iterator gsi = gsi_for_stmt (g);
7599 replace_call_with_value (&gsi, value);
7600 FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
7602 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
7603 SET_USE (use_p, value);
7604 update_stmt (use_stmt);
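/* For example (schematic GIMPLE): folding

     _1 = .LOOP_VECTORIZED (1, 2);
     if (_1 != 0)
       goto vectorized;
     else
       goto scalar;

   with VALUE being boolean_true_node turns the call into "_1 = 1" and
   rewrites every immediate use of _1, after which CFG cleanup can
   remove the then-dead branch.  */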
7608 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
7609 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
7610 single basic block in the original CFG and the new basic block is
7611 returned. DEST_CFUN must not have a CFG yet.
7613 Note that the region need not be a pure SESE region. Blocks inside
7614 the region may contain calls to abort/exit. The only restriction
7615 is that ENTRY_BB should be the only entry point and it must
7616 dominate EXIT_BB.
7618 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
7619 function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
7620 to the new function.
7622 All local variables referenced in the region are assumed to be in
7623 the corresponding BLOCK_VARS and unexpanded variable lists
7624 associated with DEST_CFUN.
7626 TODO: investigate whether we can reuse gimple_duplicate_sese_region to
7627 reimplement move_sese_region_to_fn by duplicating the region rather than
7628 moving it. */
7630 basic_block
7631 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
7632 basic_block exit_bb, tree orig_block)
7634 vec<basic_block> bbs;
7635 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
7636 basic_block after, bb, *entry_pred, *exit_succ, abb;
7637 struct function *saved_cfun = cfun;
7638 int *entry_flag, *exit_flag;
7639 profile_probability *entry_prob, *exit_prob;
7640 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
7641 edge e;
7642 edge_iterator ei;
7643 htab_t new_label_map;
7644 hash_map<void *, void *> *eh_map;
7645 class loop *loop = entry_bb->loop_father;
7646 class loop *loop0 = get_loop (saved_cfun, 0);
7647 struct move_stmt_d d;
7649 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
7650 region. */
7651 gcc_assert (entry_bb != exit_bb
7652 && (!exit_bb
7653 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
7655 /* Collect all the blocks in the region. Manually add ENTRY_BB
7656 because it won't be added by dfs_enumerate_from. */
7657 bbs.create (0);
7658 bbs.safe_push (entry_bb);
7659 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
7661 if (flag_checking)
7662 verify_sese (entry_bb, exit_bb, &bbs);
7664 /* The blocks that used to be dominated by something in BBS will now be
7665 dominated by the new block. */
7666 auto_vec<basic_block> dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
7667 bbs.address (),
7668 bbs.length ());
7670 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
7671 the predecessor edges to ENTRY_BB and the successor edges to
7672 EXIT_BB so that we can re-attach them to the new basic block that
7673 will replace the region. */
7674 num_entry_edges = EDGE_COUNT (entry_bb->preds);
7675 entry_pred = XNEWVEC (basic_block, num_entry_edges);
7676 entry_flag = XNEWVEC (int, num_entry_edges);
7677 entry_prob = XNEWVEC (profile_probability, num_entry_edges);
7678 i = 0;
7679 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
7681 entry_prob[i] = e->probability;
7682 entry_flag[i] = e->flags;
7683 entry_pred[i++] = e->src;
7684 remove_edge (e);
7687 if (exit_bb)
7689 num_exit_edges = EDGE_COUNT (exit_bb->succs);
7690 exit_succ = XNEWVEC (basic_block, num_exit_edges);
7691 exit_flag = XNEWVEC (int, num_exit_edges);
7692 exit_prob = XNEWVEC (profile_probability, num_exit_edges);
7693 i = 0;
7694 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
7696 exit_prob[i] = e->probability;
7697 exit_flag[i] = e->flags;
7698 exit_succ[i++] = e->dest;
7699 remove_edge (e);
7702 else
7704 num_exit_edges = 0;
7705 exit_succ = NULL;
7706 exit_flag = NULL;
7707 exit_prob = NULL;
7710 /* Switch context to the child function to initialize DEST_FN's CFG. */
7711 gcc_assert (dest_cfun->cfg == NULL);
7712 push_cfun (dest_cfun);
7714 init_empty_tree_cfg ();
7716 /* Initialize EH information for the new function. */
7717 eh_map = NULL;
7718 new_label_map = NULL;
7719 if (saved_cfun->eh)
7721 eh_region region = NULL;
7722 bool all = false;
7724 FOR_EACH_VEC_ELT (bbs, i, bb)
7726 region = find_outermost_region_in_block (saved_cfun, bb, region, &all);
7727 if (all)
7728 break;
7731 init_eh_for_function ();
7732 if (region != NULL || all)
7734 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
7735 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
7736 new_label_mapper, new_label_map);
7740 /* Initialize an empty loop tree. */
7741 struct loops *loops = ggc_cleared_alloc<struct loops> ();
7742 init_loops_structure (dest_cfun, loops, 1);
7743 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
7744 set_loops_for_fn (dest_cfun, loops);
7746 vec<loop_p, va_gc> *larray = get_loops (saved_cfun)->copy ();
7748 /* Move the outlined loop tree part. */
7749 num_nodes = bbs.length ();
7750 FOR_EACH_VEC_ELT (bbs, i, bb)
7752 if (bb->loop_father->header == bb)
7754 class loop *this_loop = bb->loop_father;
7755 class loop *outer = loop_outer (this_loop);
7756 if (outer == loop
7757 /* If the SESE region contains some bbs ending with
7758 a noreturn call, those are considered to belong
7759 to the outermost loop in saved_cfun, rather than
7760 the entry_bb's loop_father. */
7761 || outer == loop0)
7763 if (outer != loop)
7764 num_nodes -= this_loop->num_nodes;
7765 flow_loop_tree_node_remove (bb->loop_father);
7766 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
7767 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
7770 else if (bb->loop_father == loop0 && loop0 != loop)
7771 num_nodes--;
7773 /* Remove loop exits from the outlined region. */
7774 if (loops_for_fn (saved_cfun)->exits)
7775 FOR_EACH_EDGE (e, ei, bb->succs)
7777 struct loops *l = loops_for_fn (saved_cfun);
7778 loop_exit **slot
7779 = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
7780 NO_INSERT);
7781 if (slot)
7782 l->exits->clear_slot (slot);
7786 /* Adjust the number of blocks in the tree root of the outlined part. */
7787 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
7789 /* Setup a mapping to be used by move_block_to_fn. */
7790 loop->aux = current_loops->tree_root;
7791 loop0->aux = current_loops->tree_root;
7793 /* Fix up orig_loop_num. If the block referenced in it has been moved
7794 to dest_cfun, update orig_loop_num field, otherwise clear it. */
7795 signed char *moved_orig_loop_num = NULL;
7796 for (auto dloop : loops_list (dest_cfun, 0))
7797 if (dloop->orig_loop_num)
7799 if (moved_orig_loop_num == NULL)
7800 moved_orig_loop_num
7801 = XCNEWVEC (signed char, vec_safe_length (larray));
7802 if ((*larray)[dloop->orig_loop_num] != NULL
7803 && get_loop (saved_cfun, dloop->orig_loop_num) == NULL)
7805 if (moved_orig_loop_num[dloop->orig_loop_num] >= 0
7806 && moved_orig_loop_num[dloop->orig_loop_num] < 2)
7807 moved_orig_loop_num[dloop->orig_loop_num]++;
7808 dloop->orig_loop_num = (*larray)[dloop->orig_loop_num]->num;
7810 else
7812 moved_orig_loop_num[dloop->orig_loop_num] = -1;
7813 dloop->orig_loop_num = 0;
7816 pop_cfun ();
7818 if (moved_orig_loop_num)
7820 FOR_EACH_VEC_ELT (bbs, i, bb)
7822 gimple *g = find_loop_dist_alias (bb);
7823 if (g == NULL)
7824 continue;
7826 int orig_loop_num = tree_to_shwi (gimple_call_arg (g, 0));
7827 gcc_assert (orig_loop_num
7828 && (unsigned) orig_loop_num < vec_safe_length (larray));
7829 if (moved_orig_loop_num[orig_loop_num] == 2)
7831 /* If we have moved both loops with this orig_loop_num into
7832 dest_cfun and the LOOP_DIST_ALIAS call is being moved there
7833 too, update the first argument. */
7834 gcc_assert ((*larray)[orig_loop_num] != NULL
7835 && (get_loop (saved_cfun, orig_loop_num) == NULL));
7836 tree t = build_int_cst (integer_type_node,
7837 (*larray)[orig_loop_num]->num);
7838 gimple_call_set_arg (g, 0, t);
7839 update_stmt (g);
7840 /* Make sure the following loop will not update it. */
7841 moved_orig_loop_num[orig_loop_num] = 0;
7843 else
7844 /* Otherwise at least one of the loops stayed in saved_cfun.
7845 Remove the LOOP_DIST_ALIAS call. */
7846 fold_loop_internal_call (g, gimple_call_arg (g, 1));
7848 FOR_EACH_BB_FN (bb, saved_cfun)
7850 gimple *g = find_loop_dist_alias (bb);
7851 if (g == NULL)
7852 continue;
7853 int orig_loop_num = tree_to_shwi (gimple_call_arg (g, 0));
7854 gcc_assert (orig_loop_num
7855 && (unsigned) orig_loop_num < vec_safe_length (larray));
7856 if (moved_orig_loop_num[orig_loop_num])
7857 /* LOOP_DIST_ALIAS call remained in saved_cfun, if at least one
7858 of the corresponding loops was moved, remove it. */
7859 fold_loop_internal_call (g, gimple_call_arg (g, 1));
7861 XDELETEVEC (moved_orig_loop_num);
7863 ggc_free (larray);
7865 /* Move blocks from BBS into DEST_CFUN. */
7866 gcc_assert (bbs.length () >= 2);
7867 after = dest_cfun->cfg->x_entry_block_ptr;
7868 hash_map<tree, tree> vars_map;
7870 memset (&d, 0, sizeof (d));
7871 d.orig_block = orig_block;
7872 d.new_block = DECL_INITIAL (dest_cfun->decl);
7873 d.from_context = cfun->decl;
7874 d.to_context = dest_cfun->decl;
7875 d.vars_map = &vars_map;
7876 d.new_label_map = new_label_map;
7877 d.eh_map = eh_map;
7878 d.remap_decls_p = true;
7880 if (gimple_in_ssa_p (cfun))
7881 for (tree arg = DECL_ARGUMENTS (d.to_context); arg; arg = DECL_CHAIN (arg))
7883 tree narg = make_ssa_name_fn (dest_cfun, arg, gimple_build_nop ());
7884 set_ssa_default_def (dest_cfun, arg, narg);
7885 vars_map.put (arg, narg);
7888 FOR_EACH_VEC_ELT (bbs, i, bb)
7890 /* No need to update edge counts on the last block. They have
7891 already been updated earlier when we detached the region from
7892 the original CFG. */
7893 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
7894 after = bb;
7897 /* Adjust the maximum clique used. */
7898 dest_cfun->last_clique = saved_cfun->last_clique;
7900 loop->aux = NULL;
7901 loop0->aux = NULL;
7902 /* Loop sizes are no longer correct, fix them up. */
7903 loop->num_nodes -= num_nodes;
7904 for (class loop *outer = loop_outer (loop);
7905 outer; outer = loop_outer (outer))
7906 outer->num_nodes -= num_nodes;
7907 loop0->num_nodes -= bbs.length () - num_nodes;
7909 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
7911 class loop *aloop;
7912 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
7913 if (aloop != NULL)
7915 if (aloop->simduid)
7917 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
7918 d.to_context);
7919 dest_cfun->has_simduid_loops = true;
7921 if (aloop->force_vectorize)
7922 dest_cfun->has_force_vectorize_loops = true;
7926 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
7927 if (orig_block)
7929 tree block;
7930 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7931 == NULL_TREE);
7932 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7933 = BLOCK_SUBBLOCKS (orig_block);
7934 for (block = BLOCK_SUBBLOCKS (orig_block);
7935 block; block = BLOCK_CHAIN (block))
7936 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
7937 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
7940 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
7941 &vars_map, dest_cfun->decl);
7943 if (new_label_map)
7944 htab_delete (new_label_map);
7945 if (eh_map)
7946 delete eh_map;
7948 if (gimple_in_ssa_p (cfun))
7950 /* We need to release ssa-names in a defined order, so first find them,
7951 and then iterate in ascending version order. */
7952 bitmap release_names = BITMAP_ALLOC (NULL);
7953 vars_map.traverse<void *, gather_ssa_name_hash_map_from> (release_names);
7954 bitmap_iterator bi;
7955 unsigned i;
7956 EXECUTE_IF_SET_IN_BITMAP (release_names, 0, i, bi)
7957 release_ssa_name (ssa_name (i));
7958 BITMAP_FREE (release_names);
7961 /* Rewire the entry and exit blocks. The successor to the entry
7962 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
7963 the child function. Similarly, the predecessor of DEST_FN's
7964 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
7965 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
7966 various CFG manipulation functions get to the right CFG.
7968 FIXME, this is silly. The CFG ought to become a parameter to
7969 these helpers. */
7970 push_cfun (dest_cfun);
7971 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = entry_bb->count;
7972 make_single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
7973 if (exit_bb)
7975 make_single_succ_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
7976 EXIT_BLOCK_PTR_FOR_FN (cfun)->count = exit_bb->count;
7978 else
7979 EXIT_BLOCK_PTR_FOR_FN (cfun)->count = profile_count::zero ();
7980 pop_cfun ();
7982 /* Back in the original function, the SESE region has disappeared,
7983 create a new basic block in its place. */
7984 bb = create_empty_bb (entry_pred[0]);
7985 if (current_loops)
7986 add_bb_to_loop (bb, loop);
7987 for (i = 0; i < num_entry_edges; i++)
7989 e = make_edge (entry_pred[i], bb, entry_flag[i]);
7990 e->probability = entry_prob[i];
7993 for (i = 0; i < num_exit_edges; i++)
7995 e = make_edge (bb, exit_succ[i], exit_flag[i]);
7996 e->probability = exit_prob[i];
7999 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
8000 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
8001 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
8003 if (exit_bb)
8005 free (exit_prob);
8006 free (exit_flag);
8007 free (exit_succ);
8009 free (entry_prob);
8010 free (entry_flag);
8011 free (entry_pred);
8012 bbs.release ();
8014 return bb;
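/* A simplified usage sketch (in the style of the OMP expander, which
   is the main caller): with CHILD_FN built and DECL_STRUCT_FUNCTION
   (child_fn)->cfg still NULL,

     basic_block placeholder
       = move_sese_region_to_fn (DECL_STRUCT_FUNCTION (child_fn),
                                 entry_bb, exit_bb, block);

   leaves PLACEHOLDER in the parent CFG where the region used to be,
   with the remembered entry/exit edges re-attached to it.  */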
8017 /* Dump default def DEF to file FILE using FLAGS and indentation
8018 SPC. */
8020 static void
8021 dump_default_def (FILE *file, tree def, int spc, dump_flags_t flags)
8023 for (int i = 0; i < spc; ++i)
8024 fprintf (file, " ");
8025 dump_ssaname_info_to_file (file, def, spc);
8027 print_generic_expr (file, TREE_TYPE (def), flags);
8028 fprintf (file, " ");
8029 print_generic_expr (file, def, flags);
8030 fprintf (file, " = ");
8031 print_generic_expr (file, SSA_NAME_VAR (def), flags);
8032 fprintf (file, ";\n");
8035 /* Print no_sanitize attribute to FILE for a given attribute VALUE. */
8037 static void
8038 print_no_sanitize_attr_value (FILE *file, tree value)
8040 unsigned int flags = tree_to_uhwi (value);
8041 bool first = true;
8042 for (int i = 0; sanitizer_opts[i].name != NULL; ++i)
8044 if ((sanitizer_opts[i].flag & flags) == sanitizer_opts[i].flag)
8046 if (!first)
8047 fprintf (file, " | ");
8048 fprintf (file, "%s", sanitizer_opts[i].name);
8049 first = false;
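/* E.g. a VALUE with only the SANITIZE_NULL bit set prints just "null";
   when several sanitizer_opts entries match, their names are joined
   with " | " in table order.  */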
8054 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in dumpfile.h). */
8057 void
8058 dump_function_to_file (tree fndecl, FILE *file, dump_flags_t flags)
8060 tree arg, var, old_current_fndecl = current_function_decl;
8061 struct function *dsf;
8062 bool ignore_topmost_bind = false, any_var = false;
8063 basic_block bb;
8064 tree chain;
8065 bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
8066 && decl_is_tm_clone (fndecl));
8067 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
8069 tree fntype = TREE_TYPE (fndecl);
8070 tree attrs[] = { DECL_ATTRIBUTES (fndecl), TYPE_ATTRIBUTES (fntype) };
8072 for (int i = 0; i != 2; ++i)
8074 if (!attrs[i])
8075 continue;
8077 fprintf (file, "__attribute__((");
8079 bool first = true;
8080 tree chain;
8081 for (chain = attrs[i]; chain; first = false, chain = TREE_CHAIN (chain))
8083 if (!first)
8084 fprintf (file, ", ");
8086 tree name = get_attribute_name (chain);
8087 print_generic_expr (file, name, dump_flags);
8088 if (TREE_VALUE (chain) != NULL_TREE)
8090 fprintf (file, " (");
8092 if (strstr (IDENTIFIER_POINTER (name), "no_sanitize"))
8093 print_no_sanitize_attr_value (file, TREE_VALUE (chain));
8094 else
8095 print_generic_expr (file, TREE_VALUE (chain), dump_flags);
8096 fprintf (file, ")");
8100 fprintf (file, "))\n");
8103 current_function_decl = fndecl;
8104 if (flags & TDF_GIMPLE)
8106 static bool hotness_bb_param_printed = false;
8107 if (profile_info != NULL
8108 && !hotness_bb_param_printed)
8110 hotness_bb_param_printed = true;
8111 fprintf (file,
8112 "/* --param=gimple-fe-computed-hot-bb-threshold=%" PRId64
8113 " */\n", get_hot_bb_threshold ());
8116 print_generic_expr (file, TREE_TYPE (TREE_TYPE (fndecl)),
8117 dump_flags | TDF_SLIM);
8118 fprintf (file, " __GIMPLE (%s",
8119 (fun->curr_properties & PROP_ssa) ? "ssa"
8120 : (fun->curr_properties & PROP_cfg) ? "cfg"
8121 : "");
8123 if (fun && fun->cfg)
8125 basic_block bb = ENTRY_BLOCK_PTR_FOR_FN (fun);
8126 if (bb->count.initialized_p ())
8127 fprintf (file, ",%s(%" PRIu64 ")",
8128 profile_quality_as_string (bb->count.quality ()),
8129 bb->count.value ());
8130 fprintf (file, ")\n%s (", function_name (fun));
8133 else
8135 print_generic_expr (file, TREE_TYPE (fntype), dump_flags);
8136 fprintf (file, " %s %s(", function_name (fun),
8137 tmclone ? "[tm-clone] " : "");
8140 arg = DECL_ARGUMENTS (fndecl);
8141 while (arg)
8143 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
8144 fprintf (file, " ");
8145 print_generic_expr (file, arg, dump_flags);
8146 if (DECL_CHAIN (arg))
8147 fprintf (file, ", ");
8148 arg = DECL_CHAIN (arg);
8150 fprintf (file, ")\n");
8152 dsf = DECL_STRUCT_FUNCTION (fndecl);
8153 if (dsf && (flags & TDF_EH))
8154 dump_eh_tree (file, dsf);
8156 if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
8158 dump_node (fndecl, TDF_SLIM | flags, file);
8159 current_function_decl = old_current_fndecl;
8160 return;
8163 /* When GIMPLE is lowered, the variables are no longer available in
8164 BIND_EXPRs, so display them separately. */
8165 if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
8167 unsigned ix;
8168 ignore_topmost_bind = true;
8170 fprintf (file, "{\n");
8171 if (gimple_in_ssa_p (fun)
8172 && (flags & TDF_ALIAS))
8174 for (arg = DECL_ARGUMENTS (fndecl); arg != NULL;
8175 arg = DECL_CHAIN (arg))
8177 tree def = ssa_default_def (fun, arg);
8178 if (def)
8179 dump_default_def (file, def, 2, flags);
8182 tree res = DECL_RESULT (fun->decl);
8183 if (res != NULL_TREE
8184 && DECL_BY_REFERENCE (res))
8186 tree def = ssa_default_def (fun, res);
8187 if (def)
8188 dump_default_def (file, def, 2, flags);
8191 tree static_chain = fun->static_chain_decl;
8192 if (static_chain != NULL_TREE)
8194 tree def = ssa_default_def (fun, static_chain);
8195 if (def)
8196 dump_default_def (file, def, 2, flags);
8200 if (!vec_safe_is_empty (fun->local_decls))
8201 FOR_EACH_LOCAL_DECL (fun, ix, var)
8203 print_generic_decl (file, var, flags);
8204 fprintf (file, "\n");
8206 any_var = true;
8209 tree name;
8211 if (gimple_in_ssa_p (fun))
8212 FOR_EACH_SSA_NAME (ix, name, fun)
8214 if (!SSA_NAME_VAR (name)
8215 /* SSA names whose decls lack a name still get
8216 dumped as _N; list those explicitly as well, even
8217 though we've dumped the decl declaration as D.xxx
8218 above. */
8219 || !SSA_NAME_IDENTIFIER (name))
8221 fprintf (file, " ");
8222 print_generic_expr (file, TREE_TYPE (name), flags);
8223 fprintf (file, " ");
8224 print_generic_expr (file, name, flags);
8225 fprintf (file, ";\n");
8227 any_var = true;
8232 if (fun && fun->decl == fndecl
8233 && fun->cfg
8234 && basic_block_info_for_fn (fun))
8236 /* If the CFG has been built, emit a CFG-based dump. */
8237 if (!ignore_topmost_bind)
8238 fprintf (file, "{\n");
8240 if (any_var && n_basic_blocks_for_fn (fun))
8241 fprintf (file, "\n");
8243 FOR_EACH_BB_FN (bb, fun)
8244 dump_bb (file, bb, 2, flags);
8246 fprintf (file, "}\n");
8248 else if (fun && (fun->curr_properties & PROP_gimple_any))
8250 /* The function is now in GIMPLE form but the CFG has not been
8251 built yet. Emit the single sequence of GIMPLE statements
8252 that make up its body. */
8253 gimple_seq body = gimple_body (fndecl);
8255 if (gimple_seq_first_stmt (body)
8256 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
8257 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
8258 print_gimple_seq (file, body, 0, flags);
8259 else
8261 if (!ignore_topmost_bind)
8262 fprintf (file, "{\n");
8264 if (any_var)
8265 fprintf (file, "\n");
8267 print_gimple_seq (file, body, 2, flags);
8268 fprintf (file, "}\n");
8271 else
8273 int indent;
8275 /* Make a tree based dump. */
8276 chain = DECL_SAVED_TREE (fndecl);
8277 if (chain && TREE_CODE (chain) == BIND_EXPR)
8279 if (ignore_topmost_bind)
8281 chain = BIND_EXPR_BODY (chain);
8282 indent = 2;
8284 else
8285 indent = 0;
8287 else
8289 if (!ignore_topmost_bind)
8291 fprintf (file, "{\n");
8292 /* No topmost bind, pretend it's ignored for later. */
8293 ignore_topmost_bind = true;
8295 indent = 2;
8298 if (any_var)
8299 fprintf (file, "\n");
8301 print_generic_stmt_indented (file, chain, flags, indent);
8302 if (ignore_topmost_bind)
8303 fprintf (file, "}\n");
8306 if (flags & TDF_ENUMERATE_LOCALS)
8307 dump_enumerated_decls (file, flags);
8308 fprintf (file, "\n\n");
8310 current_function_decl = old_current_fndecl;
8313 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in dumpfile.h).  */
8315 DEBUG_FUNCTION void
8316 debug_function (tree fn, dump_flags_t flags)
8318 dump_function_to_file (fn, stderr, flags);
8322 /* Print on FILE the indexes for the predecessors of basic_block BB. */
8324 static void
8325 print_pred_bbs (FILE *file, basic_block bb)
8327 edge e;
8328 edge_iterator ei;
8330 FOR_EACH_EDGE (e, ei, bb->preds)
8331 fprintf (file, "bb_%d ", e->src->index);
8335 /* Print on FILE the indexes for the successors of basic_block BB. */
8337 static void
8338 print_succ_bbs (FILE *file, basic_block bb)
8340 edge e;
8341 edge_iterator ei;
8343 FOR_EACH_EDGE (e, ei, bb->succs)
8344 fprintf (file, "bb_%d ", e->dest->index);
8347 /* Print to FILE the basic block BB according to the VERBOSITY level. */
8349 void
8350 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
8352 char *s_indent = (char *) alloca ((size_t) indent + 1);
8353 memset ((void *) s_indent, ' ', (size_t) indent);
8354 s_indent[indent] = '\0';
8356 /* Print basic_block's header. */
8357 if (verbosity >= 2)
8359 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
8360 print_pred_bbs (file, bb);
8361 fprintf (file, "}, succs = {");
8362 print_succ_bbs (file, bb);
8363 fprintf (file, "})\n");
8366 /* Print basic_block's body. */
8367 if (verbosity >= 3)
8369 fprintf (file, "%s {\n", s_indent);
8370 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
8371 fprintf (file, "%s }\n", s_indent);
8375 static void print_loop_and_siblings (FILE *, class loop *, int, int);
8377 /* Pretty print LOOP on FILE, indented INDENT spaces. Depending on
8378 the VERBOSITY level, this outputs the contents of the loop, or just
8379 its structure. */
8381 static void
8382 print_loop (FILE *file, class loop *loop, int indent, int verbosity)
8384 char *s_indent;
8385 basic_block bb;
8387 if (loop == NULL)
8388 return;
8390 s_indent = (char *) alloca ((size_t) indent + 1);
8391 memset ((void *) s_indent, ' ', (size_t) indent);
8392 s_indent[indent] = '\0';
8394 /* Print loop's header. */
8395 fprintf (file, "%sloop_%d (", s_indent, loop->num);
8396 if (loop->header)
8397 fprintf (file, "header = %d", loop->header->index);
8398 else
8400 fprintf (file, "deleted)\n");
8401 return;
8403 if (loop->latch)
8404 fprintf (file, ", latch = %d", loop->latch->index);
8405 else
8406 fprintf (file, ", multiple latches");
8407 fprintf (file, ", niter = ");
8408 print_generic_expr (file, loop->nb_iterations);
8410 if (loop->any_upper_bound)
8412 fprintf (file, ", upper_bound = ");
8413 print_decu (loop->nb_iterations_upper_bound, file);
8415 if (loop->any_likely_upper_bound)
8417 fprintf (file, ", likely_upper_bound = ");
8418 print_decu (loop->nb_iterations_likely_upper_bound, file);
8421 if (loop->any_estimate)
8423 fprintf (file, ", estimate = ");
8424 print_decu (loop->nb_iterations_estimate, file);
8426 if (loop->unroll)
8427 fprintf (file, ", unroll = %d", loop->unroll);
8428 fprintf (file, ")\n");
8430 /* Print loop's body. */
8431 if (verbosity >= 1)
8433 fprintf (file, "%s{\n", s_indent);
8434 FOR_EACH_BB_FN (bb, cfun)
8435 if (bb->loop_father == loop)
8436 print_loops_bb (file, bb, indent, verbosity);
8438 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
8439 fprintf (file, "%s}\n", s_indent);
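/* As an illustration (all numbers made up), at verbosity 0 a loop prints
   as a single header line such as

     loop_1 (header = 3, latch = 5, niter = _7 + 1, upper_bound = 99)

   while higher verbosity levels add a brace-enclosed body listing the
   loop's blocks and, recursively, its inner loops.  */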
8443 /* Print the LOOP and its sibling loops on FILE, indented INDENT
8444 spaces. Depending on the VERBOSITY level, this outputs the contents
8445 of the loop, or just its structure. */
8447 static void
8448 print_loop_and_siblings (FILE *file, class loop *loop, int indent,
8449 int verbosity)
8451 if (loop == NULL)
8452 return;
8454 print_loop (file, loop, indent, verbosity);
8455 print_loop_and_siblings (file, loop->next, indent, verbosity);
8458 /* Follow the CFG from the entry point of the program, and on entry
8459 of each loop, pretty print the loop structure on FILE. */
8461 void
8462 print_loops (FILE *file, int verbosity)
8464 basic_block bb;
8466 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
8467 fprintf (file, "\nLoops in function: %s\n", current_function_name ());
8468 if (bb && bb->loop_father)
8469 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
8472 /* Dump a loop. */
8474 DEBUG_FUNCTION void
8475 debug (class loop &ref)
8477 print_loop (stderr, &ref, 0, /*verbosity*/0);
8480 DEBUG_FUNCTION void
8481 debug (class loop *ptr)
8483 if (ptr)
8484 debug (*ptr);
8485 else
8486 fprintf (stderr, "<nil>\n");
8489 /* Dump a loop verbosely. */
8491 DEBUG_FUNCTION void
8492 debug_verbose (class loop &ref)
8494 print_loop (stderr, &ref, 0, /*verbosity*/3);
8497 DEBUG_FUNCTION void
8498 debug_verbose (class loop *ptr)
8500 if (ptr)
8501 debug_verbose (*ptr);
8502 else
8503 fprintf (stderr, "<nil>\n");
8507 /* Debugging loops structure at tree level, at some VERBOSITY level. */
8509 DEBUG_FUNCTION void
8510 debug_loops (int verbosity)
8512 print_loops (stderr, verbosity);
8515 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
8517 DEBUG_FUNCTION void
8518 debug_loop (class loop *loop, int verbosity)
8520 print_loop (stderr, loop, 0, verbosity);
8523 /* Print on stderr the code of loop number NUM, at some VERBOSITY
8524 level. */
8526 DEBUG_FUNCTION void
8527 debug_loop_num (unsigned num, int verbosity)
8529 debug_loop (get_loop (cfun, num), verbosity);
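/* These DEBUG_FUNCTIONs are convenience entry points for use from the
   debugger, e.g. (illustrative gdb session):

     (gdb) call debug_loops (1)
     (gdb) call debug_loop_num (2, 3)

   to print the whole loop tree, or a single loop, of the current
   function at the given verbosity.  */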
8532 /* Return true if BB ends with a call, possibly followed by some
8533 instructions that must stay with the call. Return false
8534 otherwise. */
8536 static bool
8537 gimple_block_ends_with_call_p (basic_block bb)
8539 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
8540 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
8544 /* Return true if BB ends with a conditional branch. Return false
8545 otherwise. */
8547 static bool
8548 gimple_block_ends_with_condjump_p (const_basic_block bb)
8550 gimple *stmt = last_stmt (CONST_CAST_BB (bb));
8551 return (stmt && gimple_code (stmt) == GIMPLE_COND);
8555 /* Return true if statement T may terminate execution of BB in ways not
8556 explicitly represented in the CFG. */
8558 bool
8559 stmt_can_terminate_bb_p (gimple *t)
8561 tree fndecl = NULL_TREE;
8562 int call_flags = 0;
8564 /* An EH exception not handled internally terminates execution of the
8565 whole function. */
8566 if (stmt_can_throw_external (cfun, t))
8567 return true;
8569 /* NORETURN and LONGJMP calls already have an edge to exit.
8570 CONST and PURE calls do not need one.
8571 We don't currently check for CONST and PURE here, although
8572 it would be a good idea, because those attributes are
8573 figured out from the RTL in mark_constant_function, and
8574 the counter increment code from -fprofile-arcs
8575 leads to different results from -fbranch-probabilities. */
8576 if (is_gimple_call (t))
8578 fndecl = gimple_call_fndecl (t);
8579 call_flags = gimple_call_flags (t);
8582 if (is_gimple_call (t)
8583 && fndecl
8584 && fndecl_built_in_p (fndecl)
8585 && (call_flags & ECF_NOTHROW)
8586 && !(call_flags & ECF_RETURNS_TWICE)
8587 /* fork() doesn't really return twice, but wrapping it in
8588 __gcov_fork(), which calls __gcov_dump() and __gcov_reset()
8589 and clears the counters before forking, has the same effect
8590 as returning twice. Force a fake edge. */
8591 && !fndecl_built_in_p (fndecl, BUILT_IN_FORK))
8592 return false;
8594 if (is_gimple_call (t))
8596 edge_iterator ei;
8597 edge e;
8598 basic_block bb;
8600 if (call_flags & (ECF_PURE | ECF_CONST)
8601 && !(call_flags & ECF_LOOPING_CONST_OR_PURE))
8602 return false;
8604 /* A function call may do a longjmp, terminate the program or do other
8605 things. Special case noreturn calls with non-abnormal edges out, as
8606 there the fact is sufficiently represented by the lack of edges out of T. */
8607 if (!(call_flags & ECF_NORETURN))
8608 return true;
8610 bb = gimple_bb (t);
8611 FOR_EACH_EDGE (e, ei, bb->succs)
8612 if ((e->flags & EDGE_FAKE) == 0)
8613 return true;
8616 if (gasm *asm_stmt = dyn_cast <gasm *> (t))
8617 if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_input_p (asm_stmt))
8618 return true;
8620 return false;
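/* Illustrative (non-exhaustive) cases: a call that may throw externally,
   a pure call marked ECF_LOOPING_CONST_OR_PURE that may not halt, or a
   volatile asm all make this predicate true, while a nothrow const
   built-in call does not.  */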
8624 /* Add fake edges to the function exit for any non-constant and
8625 non-noreturn calls (or noreturn calls with EH/abnormal edges), and for
8626 volatile inline assembly, in the bitmap of blocks specified by BLOCKS
8627 or in the whole CFG if BLOCKS is zero. Return the number of blocks
8628 that were split.
8630 The goal is to expose cases in which entering a basic block does
8631 not imply that all subsequent instructions must be executed. */
8633 static int
8634 gimple_flow_call_edges_add (sbitmap blocks)
8636 int i;
8637 int blocks_split = 0;
8638 int last_bb = last_basic_block_for_fn (cfun);
8639 bool check_last_block = false;
8641 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
8642 return 0;
8644 if (! blocks)
8645 check_last_block = true;
8646 else
8647 check_last_block = bitmap_bit_p (blocks,
8648 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
8650 /* In the last basic block, before epilogue generation, there will be
8651 a fallthru edge to EXIT. Special care is required if the last insn
8652 of the last basic block is a call because make_edge folds duplicate
8653 edges, which would result in the fallthru edge also being marked
8654 fake, which would result in the fallthru edge being removed by
8655 remove_fake_edges, which would result in an invalid CFG.
8657 Moreover, we can't elide the outgoing fake edge, since the block
8658 profiler needs to take this into account in order to solve the minimal
8659 spanning tree in the case that the call doesn't return.
8661 Handle this by adding a dummy instruction in a new last basic block. */
8662 if (check_last_block)
8664 basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
8665 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
8666 gimple *t = NULL;
8668 if (!gsi_end_p (gsi))
8669 t = gsi_stmt (gsi);
8671 if (t && stmt_can_terminate_bb_p (t))
8673 edge e;
8675 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8676 if (e)
8678 gsi_insert_on_edge (e, gimple_build_nop ());
8679 gsi_commit_edge_inserts ();
8684 /* Now add fake edges to the function exit for any non-constant
8685 calls, since there is no way that we can determine if they will
8686 return or not... */
8687 for (i = 0; i < last_bb; i++)
8689 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8690 gimple_stmt_iterator gsi;
8691 gimple *stmt, *last_stmt;
8693 if (!bb)
8694 continue;
8696 if (blocks && !bitmap_bit_p (blocks, i))
8697 continue;
8699 gsi = gsi_last_nondebug_bb (bb);
8700 if (!gsi_end_p (gsi))
8702 last_stmt = gsi_stmt (gsi);
8705 stmt = gsi_stmt (gsi);
8706 if (stmt_can_terminate_bb_p (stmt))
8708 edge e;
8710 /* The handling above of the final block before the
8711 epilogue should be enough to verify that there is
8712 no edge to the exit block in the CFG already.
8713 Calling make_edge in such a case would cause us to
8714 mark that edge as fake and remove it later. */
8715 if (flag_checking && stmt == last_stmt)
8717 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8718 gcc_assert (e == NULL);
8721 /* Note that the following may create a new basic block
8722 and renumber the existing basic blocks. */
8723 if (stmt != last_stmt)
8725 e = split_block (bb, stmt);
8726 if (e)
8727 blocks_split++;
8729 e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
8730 e->probability = profile_probability::guessed_never ();
8732 gsi_prev (&gsi);
8734 while (!gsi_end_p (gsi));
8738 if (blocks_split)
8739 checking_verify_flow_info ();
8741 return blocks_split;
8744 /* Removes edge E and all the blocks dominated by it, and updates dominance
8745 information. The IL in E->src needs to be updated separately.
8746 If dominance info is not available, only the edge E is removed.  */
8748 void
8749 remove_edge_and_dominated_blocks (edge e)
8751 vec<basic_block> bbs_to_fix_dom = vNULL;
8752 edge f;
8753 edge_iterator ei;
8754 bool none_removed = false;
8755 unsigned i;
8756 basic_block bb, dbb;
8757 bitmap_iterator bi;
8759 /* If we are removing a path inside a non-root loop, this may change
8760 loop ownership of blocks or remove loops. Mark loops for fixup. */
8761 if (current_loops
8762 && loop_outer (e->src->loop_father) != NULL
8763 && e->src->loop_father == e->dest->loop_father)
8764 loops_state_set (LOOPS_NEED_FIXUP);
8766 if (!dom_info_available_p (CDI_DOMINATORS))
8768 remove_edge (e);
8769 return;
8772 /* No updating is needed for edges to exit. */
8773 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8775 if (cfgcleanup_altered_bbs)
8776 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8777 remove_edge (e);
8778 return;
8781 /* First, we find the basic blocks to remove. If E->dest has a predecessor
8782 that is not dominated by E->dest, then this set is empty. Otherwise,
8783 all the basic blocks dominated by E->dest are removed.
8785 Also, to DF_IDOM we store the immediate dominators of the blocks in
8786 the dominance frontier of E (i.e., of the successors of the
8787 removed blocks, if there are any, and of E->dest otherwise). */
8788 FOR_EACH_EDGE (f, ei, e->dest->preds)
8790 if (f == e)
8791 continue;
8793 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
8795 none_removed = true;
8796 break;
8800 auto_bitmap df, df_idom;
8801 auto_vec<basic_block> bbs_to_remove;
8802 if (none_removed)
8803 bitmap_set_bit (df_idom,
8804 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
8805 else
8807 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
8808 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8810 FOR_EACH_EDGE (f, ei, bb->succs)
8812 if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
8813 bitmap_set_bit (df, f->dest->index);
8816 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8817 bitmap_clear_bit (df, bb->index);
8819 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
8821 bb = BASIC_BLOCK_FOR_FN (cfun, i);
8822 bitmap_set_bit (df_idom,
8823 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
8827 if (cfgcleanup_altered_bbs)
8829 /* Record the set of the altered basic blocks. */
8830 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8831 bitmap_ior_into (cfgcleanup_altered_bbs, df);
8834 /* Remove E and the cancelled blocks. */
8835 if (none_removed)
8836 remove_edge (e);
8837 else
8839 /* Walk backwards so as to get a chance to substitute all
8840 released DEFs into debug stmts. See
8841 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
8842 details. */
8843 for (i = bbs_to_remove.length (); i-- > 0; )
8844 delete_basic_block (bbs_to_remove[i]);
8847 /* Update the dominance information. The immediate dominator may change only
8848 for blocks whose immediate dominator belongs to DF_IDOM:
8850 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
8851 removal. Let Z be an arbitrary block such that idom(Z) = Y and
8852 Z dominates X after the removal. Before removal, there exists a path P
8853 from Y to X that avoids Z. Let F be the last edge on P that is
8854 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
8855 dominates W, and because of P, Z does not dominate W), and W belongs to
8856 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
8857 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
8859 bb = BASIC_BLOCK_FOR_FN (cfun, i);
8860 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
8861 dbb;
8862 dbb = next_dom_son (CDI_DOMINATORS, dbb))
8863 bbs_to_fix_dom.safe_push (dbb);
8866 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
8868 bbs_to_fix_dom.release ();
8871 /* Purge dead EH edges from basic block BB. */
8873 bool
8874 gimple_purge_dead_eh_edges (basic_block bb)
8876 bool changed = false;
8877 edge e;
8878 edge_iterator ei;
8879 gimple *stmt = last_stmt (bb);
8881 if (stmt && stmt_can_throw_internal (cfun, stmt))
8882 return false;
8884 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8886 if (e->flags & EDGE_EH)
8888 remove_edge_and_dominated_blocks (e);
8889 changed = true;
8891 else
8892 ei_next (&ei);
8895 return changed;
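/* A typical caller pairs this with maybe_clean_eh_stmt, as
   execute_fixup_cfg below does:

     if (maybe_clean_eh_stmt (stmt)
	 && gimple_purge_dead_eh_edges (bb))
       todo |= TODO_cleanup_cfg;

   so that EH edges are purged only once the statement has lost its
   ability to throw internally.  */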
8898 /* Purge dead EH edges from the basic blocks listed in BLOCKS. */
8900 bool
8901 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
8903 bool changed = false;
8904 unsigned i;
8905 bitmap_iterator bi;
8907 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8909 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8911 /* Earlier gimple_purge_dead_eh_edges could have removed
8912 this basic block already. */
8913 gcc_assert (bb || changed);
8914 if (bb != NULL)
8915 changed |= gimple_purge_dead_eh_edges (bb);
8918 return changed;
8921 /* Purge dead abnormal call edges from basic block BB. */
8923 bool
8924 gimple_purge_dead_abnormal_call_edges (basic_block bb)
8926 bool changed = false;
8927 edge e;
8928 edge_iterator ei;
8929 gimple *stmt = last_stmt (bb);
8931 if (!cfun->has_nonlocal_label
8932 && !cfun->calls_setjmp)
8933 return false;
8935 if (stmt && stmt_can_make_abnormal_goto (stmt))
8936 return false;
8938 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8940 if (e->flags & EDGE_ABNORMAL)
8942 if (e->flags & EDGE_FALLTHRU)
8943 e->flags &= ~EDGE_ABNORMAL;
8944 else
8945 remove_edge_and_dominated_blocks (e);
8946 changed = true;
8948 else
8949 ei_next (&ei);
8952 return changed;
8955 /* Purge dead abnormal call edges from the basic blocks listed in BLOCKS. */
8957 bool
8958 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
8960 bool changed = false;
8961 unsigned i;
8962 bitmap_iterator bi;
8964 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8966 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8968 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
8969 this basic block already. */
8970 gcc_assert (bb || changed);
8971 if (bb != NULL)
8972 changed |= gimple_purge_dead_abnormal_call_edges (bb);
8975 return changed;
8978 /* This function is called whenever a new edge is created or
8979 redirected. */
8981 static void
8982 gimple_execute_on_growing_pred (edge e)
8984 basic_block bb = e->dest;
8986 if (!gimple_seq_empty_p (phi_nodes (bb)))
8987 reserve_phi_args_for_new_edge (bb);
8990 /* This function is called immediately before edge E is removed from
8991 the edge vector E->dest->preds. */
8993 static void
8994 gimple_execute_on_shrinking_pred (edge e)
8996 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
8997 remove_phi_args (e);
9000 /*---------------------------------------------------------------------------
9001 Helper functions for Loop versioning
9002 ---------------------------------------------------------------------------*/
9004 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
9005 of 'first'. Both of them are dominated by 'new_head' basic block. When
9006 'new_head' was created by splitting 'second's incoming edge, it
9007 received phi arguments on that edge from split_edge(). Later, an
9008 additional edge 'e' was created to connect 'new_head' and 'first'.
9009 This routine now adds on edge 'e' the phi args that the edge from
9010 'new_head' to 'second' received as part of the edge splitting. */
9012 static void
9013 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
9014 basic_block new_head, edge e)
9016 gphi *phi1, *phi2;
9017 gphi_iterator psi1, psi2;
9018 tree def;
9019 edge e2 = find_edge (new_head, second);
9021 /* Because NEW_HEAD has been created by splitting SECOND's incoming
9022 edge, we should always have an edge from NEW_HEAD to SECOND. */
9023 gcc_assert (e2 != NULL);
9025 /* Walk all phi nodes of basic block 'second' and add phi args to
9026 edge 'e' for the 'first' head. PHI args are always in the correct order. */
9028 for (psi2 = gsi_start_phis (second),
9029 psi1 = gsi_start_phis (first);
9030 !gsi_end_p (psi2) && !gsi_end_p (psi1);
9031 gsi_next (&psi2), gsi_next (&psi1))
9033 phi1 = psi1.phi ();
9034 phi2 = psi2.phi ();
9035 def = PHI_ARG_DEF (phi2, e2->dest_idx);
9036 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
9041 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
9042 SECOND_HEAD is the destination of the THEN part and FIRST_HEAD is
9043 the destination of the ELSE part. */
9045 static void
9046 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
9047 basic_block second_head ATTRIBUTE_UNUSED,
9048 basic_block cond_bb, void *cond_e)
9050 gimple_stmt_iterator gsi;
9051 gimple *new_cond_expr;
9052 tree cond_expr = (tree) cond_e;
9053 edge e0;
9055 /* Build the new conditional expr.  */
9056 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
9057 NULL_TREE, NULL_TREE);
9059 /* Add new cond in cond_bb. */
9060 gsi = gsi_last_bb (cond_bb);
9061 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
9063 /* Adjust edges appropriately to connect new head with first head
9064 as well as second head. */
9065 e0 = single_succ_edge (cond_bb);
9066 e0->flags &= ~EDGE_FALLTHRU;
9067 e0->flags |= EDGE_FALSE_VALUE;
9071 /* Do book-keeping of basic block BB for the profile consistency checker.
9072 Store the counts in RECORD. */
9073 static void
9074 gimple_account_profile_record (basic_block bb,
9075 struct profile_record *record)
9077 gimple_stmt_iterator i;
9078 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
9080 record->size
9081 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
9082 if (bb->count.initialized_p ())
9083 record->time
9084 += estimate_num_insns (gsi_stmt (i),
9085 &eni_time_weights) * bb->count.to_gcov_type ();
9086 else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
9087 record->time
9088 += estimate_num_insns (gsi_stmt (i),
9089 &eni_time_weights) * bb->count.to_frequency (cfun);
9093 struct cfg_hooks gimple_cfg_hooks = {
9094 "gimple",
9095 gimple_verify_flow_info,
9096 gimple_dump_bb, /* dump_bb */
9097 gimple_dump_bb_for_graph, /* dump_bb_for_graph */
9098 create_bb, /* create_basic_block */
9099 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
9100 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
9101 gimple_can_remove_branch_p, /* can_remove_branch_p */
9102 remove_bb, /* delete_basic_block */
9103 gimple_split_block, /* split_block */
9104 gimple_move_block_after, /* move_block_after */
9105 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
9106 gimple_merge_blocks, /* merge_blocks */
9107 gimple_predict_edge, /* predict_edge */
9108 gimple_predicted_by_p, /* predicted_by_p */
9109 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
9110 gimple_duplicate_bb, /* duplicate_block */
9111 gimple_split_edge, /* split_edge */
9112 gimple_make_forwarder_block, /* make_forward_block */
9113 NULL, /* tidy_fallthru_edge */
9114 NULL, /* force_nonfallthru */
9115 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
9116 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
9117 gimple_flow_call_edges_add, /* flow_call_edges_add */
9118 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
9119 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
9120 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
9121 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
9122 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
9123 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
9124 flush_pending_stmts, /* flush_pending_stmts */
9125 gimple_empty_block_p, /* block_empty_p */
9126 gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
9127 gimple_account_profile_record,
9131 /* Split all critical edges. Split some extra (not necessarily critical) edges
9132 if FOR_EDGE_INSERTION_P is true. */
9134 unsigned int
9135 split_critical_edges (bool for_edge_insertion_p /* = false */)
9137 basic_block bb;
9138 edge e;
9139 edge_iterator ei;
9141 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
9142 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
9143 mappings around the calls to split_edge. */
9144 start_recording_case_labels ();
9145 FOR_ALL_BB_FN (bb, cfun)
9147 FOR_EACH_EDGE (e, ei, bb->succs)
9149 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
9150 split_edge (e);
9151 /* PRE inserts statements on edges and expects that,
9152 since split_critical_edges was done beforehand, committing edge
9153 insertions will not split more edges. In addition to critical
9154 edges we must split edges out of blocks that have multiple
9155 successors and end in control flow statements, such as RESX.
9156 Go ahead and split them too. This matches the logic in
9157 gimple_find_edge_insert_loc. */
9158 else if (for_edge_insertion_p
9159 && (!single_pred_p (e->dest)
9160 || !gimple_seq_empty_p (phi_nodes (e->dest))
9161 || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
9162 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
9163 && !(e->flags & EDGE_ABNORMAL))
9165 gimple_stmt_iterator gsi;
9167 gsi = gsi_last_bb (e->src);
9168 if (!gsi_end_p (gsi)
9169 && stmt_ends_bb_p (gsi_stmt (gsi))
9170 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
9171 && !gimple_call_builtin_p (gsi_stmt (gsi),
9172 BUILT_IN_RETURN)))
9173 split_edge (e);
9177 end_recording_case_labels ();
9178 return 0;
9181 namespace {
9183 const pass_data pass_data_split_crit_edges =
9185 GIMPLE_PASS, /* type */
9186 "crited", /* name */
9187 OPTGROUP_NONE, /* optinfo_flags */
9188 TV_TREE_SPLIT_EDGES, /* tv_id */
9189 PROP_cfg, /* properties_required */
9190 PROP_no_crit_edges, /* properties_provided */
9191 0, /* properties_destroyed */
9192 0, /* todo_flags_start */
9193 0, /* todo_flags_finish */
9196 class pass_split_crit_edges : public gimple_opt_pass
9198 public:
9199 pass_split_crit_edges (gcc::context *ctxt)
9200 : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
9203 /* opt_pass methods: */
9204 virtual unsigned int execute (function *) { return split_critical_edges (); }
9206 opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
9207 }; // class pass_split_crit_edges
9209 } // anon namespace
9211 gimple_opt_pass *
9212 make_pass_split_crit_edges (gcc::context *ctxt)
9214 return new pass_split_crit_edges (ctxt);
9218 /* Insert COND expression, which is a GIMPLE_COND, after STMT
9219 in basic block BB, splitting the block as appropriate and
9220 creating a new conditionally executed basic block.
9221 Update the profile so the new bb is visited with probability PROB.
9222 Return the created basic block. */
9223 basic_block
9224 insert_cond_bb (basic_block bb, gimple *stmt, gimple *cond,
9225 profile_probability prob)
9227 edge fall = split_block (bb, stmt);
9228 gimple_stmt_iterator iter = gsi_last_bb (bb);
9229 basic_block new_bb;
9231 /* Insert cond statement. */
9232 gcc_assert (gimple_code (cond) == GIMPLE_COND);
9233 if (gsi_end_p (iter))
9234 gsi_insert_before (&iter, cond, GSI_CONTINUE_LINKING);
9235 else
9236 gsi_insert_after (&iter, cond, GSI_CONTINUE_LINKING);
9238 /* Create conditionally executed block. */
9239 new_bb = create_empty_bb (bb);
9240 edge e = make_edge (bb, new_bb, EDGE_TRUE_VALUE);
9241 e->probability = prob;
9242 new_bb->count = e->count ();
9243 make_single_succ_edge (new_bb, fall->dest, EDGE_FALLTHRU);
9245 /* Fix edge for split bb. */
9246 fall->flags = EDGE_FALSE_VALUE;
9247 fall->probability -= e->probability;
9249 /* Update dominance info. */
9250 if (dom_info_available_p (CDI_DOMINATORS))
9252 set_immediate_dominator (CDI_DOMINATORS, new_bb, bb);
9253 set_immediate_dominator (CDI_DOMINATORS, fall->dest, bb);
9256 /* Update loop info. */
9257 if (current_loops)
9258 add_bb_to_loop (new_bb, bb->loop_father);
9260 return new_bb;
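/* A minimal usage sketch of insert_cond_bb, kept purely as illustration:
   guard the code following STMT by a runtime test of FLAG.  FLAG is
   assumed to be a boolean-typed value; the helper, its name and the
   chosen probability are hypothetical, not part of this file's API.  */

static basic_block ATTRIBUTE_UNUSED
example_insert_guard (basic_block bb, gimple *stmt, tree flag)
{
  /* Branch into the new block when FLAG != 0; assume that is rare.  */
  gcond *cond = gimple_build_cond (NE_EXPR, flag, boolean_false_node,
				   NULL_TREE, NULL_TREE);
  return insert_cond_bb (bb, stmt, cond,
			 profile_probability::very_unlikely ());
}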
9265 /* Given a basic block B which ends with a conditional and has
9266 precisely two successors, determine which of the edges is taken if
9267 the conditional is true and which is taken if the conditional is
9268 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
9270 void
9271 extract_true_false_edges_from_block (basic_block b,
9272 edge *true_edge,
9273 edge *false_edge)
9275 edge e = EDGE_SUCC (b, 0);
9277 if (e->flags & EDGE_TRUE_VALUE)
9279 *true_edge = e;
9280 *false_edge = EDGE_SUCC (b, 1);
9282 else
9284 *false_edge = e;
9285 *true_edge = EDGE_SUCC (b, 1);
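/* Illustrative helper (hypothetical, for exposition only): report where
   the two outcomes of a conditional block BB lead.  BB is assumed to end
   in a GIMPLE_COND with exactly two successors.  */

static void ATTRIBUTE_UNUSED
example_dump_cond_edges (basic_block bb)
{
  edge true_edge, false_edge;
  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
  if (dump_file)
    fprintf (dump_file, "true -> bb_%d, false -> bb_%d\n",
	     true_edge->dest->index, false_edge->dest->index);
}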
9290 /* From a controlling predicate in the immediate dominator DOM of
9291 PHIBLOCK determine the edges into PHIBLOCK that are chosen if the
9292 predicate evaluates to true and false and store them to
9293 *TRUE_CONTROLLED_EDGE and *FALSE_CONTROLLED_EDGE if
9294 they are non-NULL. Returns true if the edges can be determined,
9295 else return false. */
9297 bool
9298 extract_true_false_controlled_edges (basic_block dom, basic_block phiblock,
9299 edge *true_controlled_edge,
9300 edge *false_controlled_edge)
9302 basic_block bb = phiblock;
9303 edge true_edge, false_edge, tem;
9304 edge e0 = NULL, e1 = NULL;
9306 /* We have to verify that one edge into the PHI node is dominated
9307 by the true edge of the predicate block and the other edge
9308 dominated by the false edge. This ensures that the PHI argument
9309 we are going to take is completely determined by the path we
9310 take from the predicate block.
9311 We can only use BB dominance checks below if the destination of
9312 the true/false edges are dominated by their edge, thus only
9313 have a single predecessor. */
9314 extract_true_false_edges_from_block (dom, &true_edge, &false_edge);
9315 tem = EDGE_PRED (bb, 0);
9316 if (tem == true_edge
9317 || (single_pred_p (true_edge->dest)
9318 && (tem->src == true_edge->dest
9319 || dominated_by_p (CDI_DOMINATORS,
9320 tem->src, true_edge->dest))))
9321 e0 = tem;
9322 else if (tem == false_edge
9323 || (single_pred_p (false_edge->dest)
9324 && (tem->src == false_edge->dest
9325 || dominated_by_p (CDI_DOMINATORS,
9326 tem->src, false_edge->dest))))
9327 e1 = tem;
9328 else
9329 return false;
9330 tem = EDGE_PRED (bb, 1);
9331 if (tem == true_edge
9332 || (single_pred_p (true_edge->dest)
9333 && (tem->src == true_edge->dest
9334 || dominated_by_p (CDI_DOMINATORS,
9335 tem->src, true_edge->dest))))
9336 e0 = tem;
9337 else if (tem == false_edge
9338 || (single_pred_p (false_edge->dest)
9339 && (tem->src == false_edge->dest
9340 || dominated_by_p (CDI_DOMINATORS,
9341 tem->src, false_edge->dest))))
9342 e1 = tem;
9343 else
9344 return false;
9345 if (!e0 || !e1)
9346 return false;
9348 if (true_controlled_edge)
9349 *true_controlled_edge = e0;
9350 if (false_controlled_edge)
9351 *false_controlled_edge = e1;
9353 return true;
9356 /* Generate a range test LHS CODE RHS that determines whether INDEX is in the
9357 range [LOW, HIGH]. Place associated stmts before the last stmt of BB. */
9359 void
9360 generate_range_test (basic_block bb, tree index, tree low, tree high,
9361 tree *lhs, tree *rhs)
9363 tree type = TREE_TYPE (index);
9364 tree utype = range_check_type (type);
9366 low = fold_convert (utype, low);
9367 high = fold_convert (utype, high);
9369 gimple_seq seq = NULL;
9370 index = gimple_convert (&seq, utype, index);
9371 *lhs = gimple_build (&seq, MINUS_EXPR, utype, index, low);
9372 *rhs = const_binop (MINUS_EXPR, utype, high, low);
9374 gimple_stmt_iterator gsi = gsi_last_bb (bb);
9375 gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);
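/* The emitted test uses the usual unsigned-subtraction trick: for an
   illustrative range [3, 10] on an int INDEX, *LHS becomes
   (unsigned) INDEX - 3 and *RHS becomes 7, so the single unsigned
   comparison *LHS <= *RHS is equivalent to 3 <= INDEX && INDEX <= 10.  */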
9378 /* Return the basic block that belongs to label numbered INDEX
9379 of a switch statement. */
9381 basic_block
9382 gimple_switch_label_bb (function *ifun, gswitch *gs, unsigned index)
9384 return label_to_block (ifun, CASE_LABEL (gimple_switch_label (gs, index)));
9387 /* Return the default basic block of a switch statement. */
9389 basic_block
9390 gimple_switch_default_bb (function *ifun, gswitch *gs)
9392 return gimple_switch_label_bb (ifun, gs, 0);
9395 /* Return the edge that belongs to label numbered INDEX
9396 of a switch statement. */
9398 edge
9399 gimple_switch_edge (function *ifun, gswitch *gs, unsigned index)
9401 return find_edge (gimple_bb (gs), gimple_switch_label_bb (ifun, gs, index));
9404 /* Return the default edge of a switch statement. */
9406 edge
9407 gimple_switch_default_edge (function *ifun, gswitch *gs)
9409 return gimple_switch_edge (ifun, gs, 0);
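/* Illustrative sketch (hypothetical helper, not part of this file's
   API): visit the edge of every non-default case of GS, relying on
   label index 0 being the default.  */

static void ATTRIBUTE_UNUSED
example_walk_case_edges (function *ifun, gswitch *gs)
{
  for (unsigned i = 1; i < gimple_switch_num_labels (gs); ++i)
    {
      edge e = gimple_switch_edge (ifun, gs, i);
      if (dump_file)
	fprintf (dump_file, "case %u -> bb_%d\n", i, e->dest->index);
    }
}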
9413 /* Emit return warnings. */
9415 namespace {
9417 const pass_data pass_data_warn_function_return =
9419 GIMPLE_PASS, /* type */
9420 "*warn_function_return", /* name */
9421 OPTGROUP_NONE, /* optinfo_flags */
9422 TV_NONE, /* tv_id */
9423 PROP_cfg, /* properties_required */
9424 0, /* properties_provided */
9425 0, /* properties_destroyed */
9426 0, /* todo_flags_start */
9427 0, /* todo_flags_finish */
9430 class pass_warn_function_return : public gimple_opt_pass
9432 public:
9433 pass_warn_function_return (gcc::context *ctxt)
9434 : gimple_opt_pass (pass_data_warn_function_return, ctxt)
9437 /* opt_pass methods: */
9438 virtual unsigned int execute (function *);
9440 }; // class pass_warn_function_return
9442 unsigned int
9443 pass_warn_function_return::execute (function *fun)
9445 location_t location;
9446 gimple *last;
9447 edge e;
9448 edge_iterator ei;
9450 if (!targetm.warn_func_return (fun->decl))
9451 return 0;
9453 /* If we have a path to EXIT, then we do return. */
9454 if (TREE_THIS_VOLATILE (fun->decl)
9455 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
9457 location = UNKNOWN_LOCATION;
9458 for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (fun)->preds);
9459 (e = ei_safe_edge (ei)); )
9461 last = last_stmt (e->src);
9462 if ((gimple_code (last) == GIMPLE_RETURN
9463 || gimple_call_builtin_p (last, BUILT_IN_RETURN))
9464 && location == UNKNOWN_LOCATION
9465 && ((location = LOCATION_LOCUS (gimple_location (last)))
9466 != UNKNOWN_LOCATION)
9467 && !optimize)
9468 break;
9469 /* When optimizing, replace return stmts in noreturn functions
9470 with a __builtin_unreachable () call. */
9471 if (optimize && gimple_code (last) == GIMPLE_RETURN)
9473 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
9474 gimple *new_stmt = gimple_build_call (fndecl, 0);
9475 gimple_set_location (new_stmt, gimple_location (last));
9476 gimple_stmt_iterator gsi = gsi_for_stmt (last);
9477 gsi_replace (&gsi, new_stmt, true);
9478 remove_edge (e);
9480 else
9481 ei_next (&ei);
9483 if (location == UNKNOWN_LOCATION)
9484 location = cfun->function_end_locus;
9485 warning_at (location, 0, "%<noreturn%> function does return");
9488 /* If we see "return;" in some basic block, then we do reach the end
9489 without returning a value. */
9490 else if (warn_return_type > 0
9491 && !warning_suppressed_p (fun->decl, OPT_Wreturn_type)
9492 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
9494 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
9496 gimple *last = last_stmt (e->src);
9497 greturn *return_stmt = dyn_cast <greturn *> (last);
9498 if (return_stmt
9499 && gimple_return_retval (return_stmt) == NULL
9500 && !warning_suppressed_p (last, OPT_Wreturn_type))
9502 location = gimple_location (last);
9503 if (LOCATION_LOCUS (location) == UNKNOWN_LOCATION)
9504 location = fun->function_end_locus;
9505 if (warning_at (location, OPT_Wreturn_type,
9506 "control reaches end of non-void function"))
9507 suppress_warning (fun->decl, OPT_Wreturn_type);
9508 break;
9511 /* The C++ FE turns fallthrough from the end of a non-void function
9512 into a __builtin_unreachable () call with BUILTINS_LOCATION.
9513 Recognize those too. */
9514 basic_block bb;
9515 if (!warning_suppressed_p (fun->decl, OPT_Wreturn_type))
9516 FOR_EACH_BB_FN (bb, fun)
9517 if (EDGE_COUNT (bb->succs) == 0)
9519 gimple *last = last_stmt (bb);
9520 const enum built_in_function ubsan_missing_ret
9521 = BUILT_IN_UBSAN_HANDLE_MISSING_RETURN;
9522 if (last
9523 && ((LOCATION_LOCUS (gimple_location (last))
9524 == BUILTINS_LOCATION
9525 && gimple_call_builtin_p (last, BUILT_IN_UNREACHABLE))
9526 || gimple_call_builtin_p (last, ubsan_missing_ret)))
9528 gimple_stmt_iterator gsi = gsi_for_stmt (last);
9529 gsi_prev_nondebug (&gsi);
9530 gimple *prev = gsi_stmt (gsi);
9531 if (prev == NULL)
9532 location = UNKNOWN_LOCATION;
9533 else
9534 location = gimple_location (prev);
9535 if (LOCATION_LOCUS (location) == UNKNOWN_LOCATION)
9536 location = fun->function_end_locus;
9537 if (warning_at (location, OPT_Wreturn_type,
9538 "control reaches end of non-void function"))
9539 suppress_warning (fun->decl, OPT_Wreturn_type);
9540 break;
9544 return 0;
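/* For example (illustrative user code, not part of GCC):

     int f (int x) { if (x) return 1; }

   can reach the end of a non-void function and is diagnosed by
   -Wreturn-type above, while a noreturn function containing a reachable
   return statement triggers the "does return" warning instead.  */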
9547 } // anon namespace
9549 gimple_opt_pass *
9550 make_pass_warn_function_return (gcc::context *ctxt)
9552 return new pass_warn_function_return (ctxt);
9555 /* Walk a gimplified function and warn about calls whose return value is
9556 ignored when the called function has attribute((warn_unused_result))
9557 set. This is done before inlining, so we don't have to worry about that. */
9559 static void
9560 do_warn_unused_result (gimple_seq seq)
9562 tree fdecl, ftype;
9563 gimple_stmt_iterator i;
9565 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
9567 gimple *g = gsi_stmt (i);
9569 switch (gimple_code (g))
9571 case GIMPLE_BIND:
9572 do_warn_unused_result (gimple_bind_body (as_a <gbind *>(g)));
9573 break;
9574 case GIMPLE_TRY:
9575 do_warn_unused_result (gimple_try_eval (g));
9576 do_warn_unused_result (gimple_try_cleanup (g));
9577 break;
9578 case GIMPLE_CATCH:
9579 do_warn_unused_result (gimple_catch_handler (
9580 as_a <gcatch *> (g)));
9581 break;
9582 case GIMPLE_EH_FILTER:
9583 do_warn_unused_result (gimple_eh_filter_failure (g));
9584 break;
9586 case GIMPLE_CALL:
9587 if (gimple_call_lhs (g))
9588 break;
9589 if (gimple_call_internal_p (g))
9590 break;
9592 /* This is a naked call, as opposed to a GIMPLE_CALL with an
9593 LHS. All calls whose value is ignored should be
9594 represented like this. Look for the attribute. */
9595 fdecl = gimple_call_fndecl (g);
9596 ftype = gimple_call_fntype (g);
9598 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
9600 location_t loc = gimple_location (g);
9602 if (fdecl)
9603 warning_at (loc, OPT_Wunused_result,
9604 "ignoring return value of %qD "
9605 "declared with attribute %<warn_unused_result%>",
9606 fdecl);
9607 else
9608 warning_at (loc, OPT_Wunused_result,
9609 "ignoring return value of function "
9610 "declared with attribute %<warn_unused_result%>");
9612 break;
9614 default:
9615 /* Not a container, not a call, or a call whose value is used. */
9616 break;
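/* For example (illustrative user code, not part of GCC):

     __attribute__ ((warn_unused_result)) int must_check (void);
     void g (void) { must_check (); }

   the naked call in g has no LHS, so the lookup above finds the
   attribute on the call's fntype and -Wunused-result diagnoses it.  */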
9621 namespace {
9623 const pass_data pass_data_warn_unused_result =
9625 GIMPLE_PASS, /* type */
9626 "*warn_unused_result", /* name */
9627 OPTGROUP_NONE, /* optinfo_flags */
9628 TV_NONE, /* tv_id */
9629 PROP_gimple_any, /* properties_required */
9630 0, /* properties_provided */
9631 0, /* properties_destroyed */
9632 0, /* todo_flags_start */
9633 0, /* todo_flags_finish */
9636 class pass_warn_unused_result : public gimple_opt_pass
9638 public:
9639 pass_warn_unused_result (gcc::context *ctxt)
9640 : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
9643 /* opt_pass methods: */
9644 virtual bool gate (function *) { return flag_warn_unused_result; }
9645 virtual unsigned int execute (function *)
9647 do_warn_unused_result (gimple_body (current_function_decl));
9648 return 0;
9651 }; // class pass_warn_unused_result
9653 } // anon namespace
9655 gimple_opt_pass *
9656 make_pass_warn_unused_result (gcc::context *ctxt)
9658 return new pass_warn_unused_result (ctxt);
9661 /* IPA passes, compilation of earlier functions or inlining
9662 might have changed some properties, such as marking functions nothrow,
9663 pure, const or noreturn.
9664 Remove redundant edges and basic blocks, and create new ones if necessary.
9666 This pass can't be executed as a standalone pass from the pass manager,
9667 because between inlining and this fixup verify_flow_info would fail. */
9669 unsigned int
9670 execute_fixup_cfg (void)
9672 basic_block bb;
9673 gimple_stmt_iterator gsi;
9674 int todo = 0;
9675 cgraph_node *node = cgraph_node::get (current_function_decl);
9676 /* Same scaling is also done by ipa_merge_profiles. */
9677 profile_count num = node->count;
9678 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
9679 bool scale = num.initialized_p () && !(num == den);
9681 if (scale)
9683 profile_count::adjust_for_ipa_scaling (&num, &den);
9684 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = node->count;
9685 EXIT_BLOCK_PTR_FOR_FN (cfun)->count
9686 = EXIT_BLOCK_PTR_FOR_FN (cfun)->count.apply_scale (num, den);
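/* As an illustration (made-up counts): if NODE->count is 50 but the
   entry block count is 100, NUM/DEN becomes 50/100 after adjustment and
   every block count below is halved, so the body profile matches the
   IPA-determined entry count.  */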
9689 FOR_EACH_BB_FN (bb, cfun)
9691 if (scale)
9692 bb->count = bb->count.apply_scale (num, den);
9693 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
9695 gimple *stmt = gsi_stmt (gsi);
9696 tree decl = is_gimple_call (stmt)
9697 ? gimple_call_fndecl (stmt)
9698 : NULL;
9699 if (decl)
9701 int flags = gimple_call_flags (stmt);
9702 if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
9704 if (gimple_purge_dead_abnormal_call_edges (bb))
9705 todo |= TODO_cleanup_cfg;
9707 if (gimple_in_ssa_p (cfun))
9709 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9710 update_stmt (stmt);
9714 if (flags & ECF_NORETURN
9715 && fixup_noreturn_call (stmt))
9716 todo |= TODO_cleanup_cfg;
9719 /* Remove stores to variables we marked write-only.
9720 Keep the access when the store has a side effect, i.e. when the
9721 source is volatile. */
9722 if (gimple_store_p (stmt)
9723 && !gimple_has_side_effects (stmt)
9724 && !optimize_debug)
9726 tree lhs = get_base_address (gimple_get_lhs (stmt));
9728 if (VAR_P (lhs)
9729 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
9730 && varpool_node::get (lhs)->writeonly)
9732 unlink_stmt_vdef (stmt);
9733 gsi_remove (&gsi, true);
9734 release_defs (stmt);
9735 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9736 continue;
9739 /* For calls we can simply remove the LHS when the underlying
9740 variable is known to be write-only. */
9741 if (is_gimple_call (stmt)
9742 && gimple_get_lhs (stmt))
9744 tree lhs = get_base_address (gimple_get_lhs (stmt));
9746 if (VAR_P (lhs)
9747 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
9748 && varpool_node::get (lhs)->writeonly)
9750 gimple_call_set_lhs (stmt, NULL);
9751 update_stmt (stmt);
9752 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9756 if (maybe_clean_eh_stmt (stmt)
9757 && gimple_purge_dead_eh_edges (bb))
9758 todo |= TODO_cleanup_cfg;
9759 gsi_next (&gsi);
9762 /* If we have a basic block with no successors that does not
9763 end with a control statement or a noreturn call, end it with
9764 a call to __builtin_unreachable. This situation can occur
9765 when inlining a noreturn call that does in fact return. */
9766 if (EDGE_COUNT (bb->succs) == 0)
9768 gimple *stmt = last_stmt (bb);
9769 if (!stmt
9770 || (!is_ctrl_stmt (stmt)
9771 && (!is_gimple_call (stmt)
9772 || !gimple_call_noreturn_p (stmt))))
9774 if (stmt && is_gimple_call (stmt))
9775 gimple_call_set_ctrl_altering (stmt, false);
9776 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
9777 stmt = gimple_build_call (fndecl, 0);
9778 gimple_stmt_iterator gsi = gsi_last_bb (bb);
9779 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
9780 if (!cfun->after_inlining)
9782 gcall *call_stmt = dyn_cast <gcall *> (stmt);
9783 node->create_edge (cgraph_node::get_create (fndecl),
9784 call_stmt, bb->count);
9789 if (scale)
9791 update_max_bb_count ();
9792 compute_function_frequency ();
9795 if (current_loops
9796 && (todo & TODO_cleanup_cfg))
9797 loops_state_set (LOOPS_NEED_FIXUP);
9799 return todo;
9802 namespace {
9804 const pass_data pass_data_fixup_cfg =
9806 GIMPLE_PASS, /* type */
9807 "fixup_cfg", /* name */
9808 OPTGROUP_NONE, /* optinfo_flags */
9809 TV_NONE, /* tv_id */
9810 PROP_cfg, /* properties_required */
9811 0, /* properties_provided */
9812 0, /* properties_destroyed */
9813 0, /* todo_flags_start */
9814 0, /* todo_flags_finish */
9817 class pass_fixup_cfg : public gimple_opt_pass
9819 public:
9820 pass_fixup_cfg (gcc::context *ctxt)
9821 : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
9824 /* opt_pass methods: */
9825 opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
9826 virtual unsigned int execute (function *) { return execute_fixup_cfg (); }
9828 }; // class pass_fixup_cfg
9830 } // anon namespace
9832 gimple_opt_pass *
9833 make_pass_fixup_cfg (gcc::context *ctxt)
9835 return new pass_fixup_cfg (ctxt);
9838 /* Garbage collection support for edge_def. */
9840 extern void gt_ggc_mx (tree&);
9841 extern void gt_ggc_mx (gimple *&);
9842 extern void gt_ggc_mx (rtx&);
9843 extern void gt_ggc_mx (basic_block&);
9845 static void
9846 gt_ggc_mx (rtx_insn *& x)
9848 if (x)
9849 gt_ggc_mx_rtx_def ((void *) x);
9852 void
9853 gt_ggc_mx (edge_def *e)
9855 tree block = LOCATION_BLOCK (e->goto_locus);
9856 gt_ggc_mx (e->src);
9857 gt_ggc_mx (e->dest);
9858 if (current_ir_type () == IR_GIMPLE)
9859 gt_ggc_mx (e->insns.g);
9860 else
9861 gt_ggc_mx (e->insns.r);
9862 gt_ggc_mx (block);
9865 /* PCH support for edge_def. */
9867 extern void gt_pch_nx (tree&);
9868 extern void gt_pch_nx (gimple *&);
9869 extern void gt_pch_nx (rtx&);
9870 extern void gt_pch_nx (basic_block&);
9872 static void
9873 gt_pch_nx (rtx_insn *& x)
9875 if (x)
9876 gt_pch_nx_rtx_def ((void *) x);
9879 void
9880 gt_pch_nx (edge_def *e)
9882 tree block = LOCATION_BLOCK (e->goto_locus);
9883 gt_pch_nx (e->src);
9884 gt_pch_nx (e->dest);
9885 if (current_ir_type () == IR_GIMPLE)
9886 gt_pch_nx (e->insns.g);
9887 else
9888 gt_pch_nx (e->insns.r);
9889 gt_pch_nx (block);
9892 void
9893 gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
9895 tree block = LOCATION_BLOCK (e->goto_locus);
9896 op (&(e->src), cookie);
9897 op (&(e->dest), cookie);
9898 if (current_ir_type () == IR_GIMPLE)
9899 op (&(e->insns.g), cookie);
9900 else
9901 op (&(e->insns.r), cookie);
9902 op (&(block), cookie);
9905 #if CHECKING_P
9907 namespace selftest {
9909 /* Helper function for CFG selftests: create a dummy function decl
9910 and push it as cfun. */
9912 static tree
9913 push_fndecl (const char *name)
9915 tree fn_type = build_function_type_array (integer_type_node, 0, NULL);
9916 /* FIXME: this uses input_location: */
9917 tree fndecl = build_fn_decl (name, fn_type);
9918 tree retval = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
9919 NULL_TREE, integer_type_node);
9920 DECL_RESULT (fndecl) = retval;
9921 push_struct_function (fndecl);
9922 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9923 ASSERT_TRUE (fun != NULL);
9924 init_empty_tree_cfg_for_function (fun);
9925 ASSERT_EQ (2, n_basic_blocks_for_fn (fun));
9926 ASSERT_EQ (0, n_edges_for_fn (fun));
9927 return fndecl;
9930 /* These tests directly create CFGs.
9931 Compare with the static fns within tree-cfg.c:
9932 - build_gimple_cfg
9933 - make_blocks: calls create_basic_block (seq, bb);
9934 - make_edges. */
9936 /* Verify a simple cfg of the form:
9937 ENTRY -> A -> B -> C -> EXIT. */
9939 static void
9940 test_linear_chain ()
9942 gimple_register_cfg_hooks ();
9944 tree fndecl = push_fndecl ("cfg_test_linear_chain");
9945 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9947 /* Create some empty blocks. */
9948 basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
9949 basic_block bb_b = create_empty_bb (bb_a);
9950 basic_block bb_c = create_empty_bb (bb_b);
9952 ASSERT_EQ (5, n_basic_blocks_for_fn (fun));
9953 ASSERT_EQ (0, n_edges_for_fn (fun));
9955 /* Create some edges: a simple linear chain of BBs. */
9956 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
9957 make_edge (bb_a, bb_b, 0);
9958 make_edge (bb_b, bb_c, 0);
9959 make_edge (bb_c, EXIT_BLOCK_PTR_FOR_FN (fun), 0);
9961 /* Verify the edges. */
9962 ASSERT_EQ (4, n_edges_for_fn (fun));
9963 ASSERT_EQ (NULL, ENTRY_BLOCK_PTR_FOR_FN (fun)->preds);
9964 ASSERT_EQ (1, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs->length ());
9965 ASSERT_EQ (1, bb_a->preds->length ());
9966 ASSERT_EQ (1, bb_a->succs->length ());
9967 ASSERT_EQ (1, bb_b->preds->length ());
9968 ASSERT_EQ (1, bb_b->succs->length ());
9969 ASSERT_EQ (1, bb_c->preds->length ());
9970 ASSERT_EQ (1, bb_c->succs->length ());
9971 ASSERT_EQ (1, EXIT_BLOCK_PTR_FOR_FN (fun)->preds->length ());
9972 ASSERT_EQ (NULL, EXIT_BLOCK_PTR_FOR_FN (fun)->succs);
9974 /* Verify the dominance information.
9975 Each BB in our simple chain should be dominated by the one before
9976 it. */
9977 calculate_dominance_info (CDI_DOMINATORS);
9978 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
9979 ASSERT_EQ (bb_b, get_immediate_dominator (CDI_DOMINATORS, bb_c));
9980 auto_vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
9981 ASSERT_EQ (1, dom_by_b.length ());
9982 ASSERT_EQ (bb_c, dom_by_b[0]);
9983 free_dominance_info (CDI_DOMINATORS);
9985 /* Similarly for post-dominance: each BB in our chain is post-dominated
9986 by the one after it. */
9987 calculate_dominance_info (CDI_POST_DOMINATORS);
9988 ASSERT_EQ (bb_b, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
9989 ASSERT_EQ (bb_c, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
9990 auto_vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
9991 ASSERT_EQ (1, postdom_by_b.length ());
9992 ASSERT_EQ (bb_a, postdom_by_b[0]);
9993 free_dominance_info (CDI_POST_DOMINATORS);
9995 pop_cfun ();
9998 /* Verify a simple CFG of the form:
9999      ENTRY
10000       |
10001       A
10002      / \
10003     /t  \f
10004    B     C
10005     \   /
10006      \ /
10007       D
10008       |
10009      EXIT.  */
10011 static void
10012 test_diamond ()
10014 gimple_register_cfg_hooks ();
10016 tree fndecl = push_fndecl ("cfg_test_diamond");
10017 function *fun = DECL_STRUCT_FUNCTION (fndecl);
10019 /* Create some empty blocks. */
10020 basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
10021 basic_block bb_b = create_empty_bb (bb_a);
10022 basic_block bb_c = create_empty_bb (bb_a);
10023 basic_block bb_d = create_empty_bb (bb_b);
10025 ASSERT_EQ (6, n_basic_blocks_for_fn (fun));
10026 ASSERT_EQ (0, n_edges_for_fn (fun));
10028 /* Create the edges. */
10029 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
10030 make_edge (bb_a, bb_b, EDGE_TRUE_VALUE);
10031 make_edge (bb_a, bb_c, EDGE_FALSE_VALUE);
10032 make_edge (bb_b, bb_d, 0);
10033 make_edge (bb_c, bb_d, 0);
10034 make_edge (bb_d, EXIT_BLOCK_PTR_FOR_FN (fun), 0);
10036 /* Verify the edges. */
10037 ASSERT_EQ (6, n_edges_for_fn (fun));
10038 ASSERT_EQ (1, bb_a->preds->length ());
10039 ASSERT_EQ (2, bb_a->succs->length ());
10040 ASSERT_EQ (1, bb_b->preds->length ());
10041 ASSERT_EQ (1, bb_b->succs->length ());
10042 ASSERT_EQ (1, bb_c->preds->length ());
10043 ASSERT_EQ (1, bb_c->succs->length ());
10044 ASSERT_EQ (2, bb_d->preds->length ());
10045 ASSERT_EQ (1, bb_d->succs->length ());
10047 /* Verify the dominance information. */
10048 calculate_dominance_info (CDI_DOMINATORS);
10049 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
10050 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_c));
10051 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_d));
10052 auto_vec<basic_block> dom_by_a = get_dominated_by (CDI_DOMINATORS, bb_a);
10053 ASSERT_EQ (3, dom_by_a.length ()); /* B, C, D, in some order. */
10054 dom_by_a.release ();
10055 auto_vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
10056 ASSERT_EQ (0, dom_by_b.length ());
10057 dom_by_b.release ();
10058 free_dominance_info (CDI_DOMINATORS);
10060 /* Similarly for post-dominance. */
10061 calculate_dominance_info (CDI_POST_DOMINATORS);
10062 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
10063 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
10064 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_c));
10065 auto_vec<basic_block> postdom_by_d = get_dominated_by (CDI_POST_DOMINATORS, bb_d);
10066 ASSERT_EQ (3, postdom_by_d.length ()); /* A, B, C in some order. */
10067 postdom_by_d.release ();
10068 auto_vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
10069 ASSERT_EQ (0, postdom_by_b.length ());
10070 postdom_by_b.release ();
10071 free_dominance_info (CDI_POST_DOMINATORS);
10073 pop_cfun ();
10076 /* Verify that we can handle a CFG containing a "complete" aka
10077 fully-connected subgraph (where A B C D below all have edges
10078 pointing to every other node, and also to themselves).
10079 e.g.:
10080   ENTRY  EXIT
          |     ^
          V     |
10086    A<--->B
10087    ^^   ^^
10088    | \ / |
10089    |  X  |
10090    | / \ |
10091    VV   VV
10092    C<--->D
10095 static void
10096 test_fully_connected ()
10098 gimple_register_cfg_hooks ();
10100 tree fndecl = push_fndecl ("cfg_fully_connected");
10101 function *fun = DECL_STRUCT_FUNCTION (fndecl);
10103 const int n = 4;
10105 /* Create some empty blocks. */
10106 auto_vec <basic_block> subgraph_nodes;
10107 for (int i = 0; i < n; i++)
10108 subgraph_nodes.safe_push (create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun)));
10110 ASSERT_EQ (n + 2, n_basic_blocks_for_fn (fun));
10111 ASSERT_EQ (0, n_edges_for_fn (fun));
10113 /* Create the edges. */
10114 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), subgraph_nodes[0], EDGE_FALLTHRU);
10115 make_edge (subgraph_nodes[0], EXIT_BLOCK_PTR_FOR_FN (fun), 0);
10116 for (int i = 0; i < n; i++)
10117 for (int j = 0; j < n; j++)
10118 make_edge (subgraph_nodes[i], subgraph_nodes[j], 0);
10120 /* Verify the edges. */
10121 ASSERT_EQ (2 + (n * n), n_edges_for_fn (fun));
10122 /* The first one is linked to ENTRY/EXIT as well as itself and
10123 everything else. */
10124 ASSERT_EQ (n + 1, subgraph_nodes[0]->preds->length ());
10125 ASSERT_EQ (n + 1, subgraph_nodes[0]->succs->length ());
10126 /* The other ones in the subgraph are linked to everything in
10127 the subgraph (including themselves). */
10128 for (int i = 1; i < n; i++)
10130 ASSERT_EQ (n, subgraph_nodes[i]->preds->length ());
10131 ASSERT_EQ (n, subgraph_nodes[i]->succs->length ());
10134 /* Verify the dominance information. */
10135 calculate_dominance_info (CDI_DOMINATORS);
10136 /* The initial block in the subgraph should be dominated by ENTRY. */
10137 ASSERT_EQ (ENTRY_BLOCK_PTR_FOR_FN (fun),
10138 get_immediate_dominator (CDI_DOMINATORS,
10139 subgraph_nodes[0]));
10140 /* Every other block in the subgraph should be dominated by the
10141 initial block. */
10142 for (int i = 1; i < n; i++)
10143 ASSERT_EQ (subgraph_nodes[0],
10144 get_immediate_dominator (CDI_DOMINATORS,
10145 subgraph_nodes[i]));
10146 free_dominance_info (CDI_DOMINATORS);
10148 /* Similarly for post-dominance. */
10149 calculate_dominance_info (CDI_POST_DOMINATORS);
10150 /* The initial block in the subgraph should be postdominated by EXIT. */
10151 ASSERT_EQ (EXIT_BLOCK_PTR_FOR_FN (fun),
10152 get_immediate_dominator (CDI_POST_DOMINATORS,
10153 subgraph_nodes[0]));
10154 /* Every other block in the subgraph should be postdominated by the
10155 initial block, since that leads to EXIT. */
10156 for (int i = 1; i < n; i++)
10157 ASSERT_EQ (subgraph_nodes[0],
10158 get_immediate_dominator (CDI_POST_DOMINATORS,
10159 subgraph_nodes[i]));
10160 free_dominance_info (CDI_POST_DOMINATORS);
10162 pop_cfun ();
10165 /* Run all of the selftests within this file. */
10167 void
10168 tree_cfg_c_tests ()
10170 test_linear_chain ();
10171 test_diamond ();
10172 test_fully_connected ();
10175 } // namespace selftest
10177 /* TODO: test the dominator/postdominator logic with various graphs/nodes:
10178 - loop
10179 - nested loops
10180 - switch statement (a block with many out-edges)
10181 - something that jumps to itself
10182 - etc */
10184 #endif /* CHECKING_P */