/* Extraction artifact (gitweb header), not source code:
   commit subject "tree-optimization/114485 - neg induction with partial vectors",
   path gcc/tree-cfg.cc, blob bdffc3b4ed277724e81b7dd67fe7966e8ece0c13.  */
1 /* Control flow functions for trees.
2 Copyright (C) 2001-2024 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "cfghooks.h"
30 #include "tree-pass.h"
31 #include "ssa.h"
32 #include "cgraph.h"
33 #include "gimple-pretty-print.h"
34 #include "diagnostic-core.h"
35 #include "fold-const.h"
36 #include "trans-mem.h"
37 #include "stor-layout.h"
38 #include "print-tree.h"
39 #include "cfganal.h"
40 #include "gimple-iterator.h"
41 #include "gimple-fold.h"
42 #include "tree-eh.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "tree-cfg.h"
46 #include "tree-ssa-loop-manip.h"
47 #include "tree-ssa-loop-niter.h"
48 #include "tree-into-ssa.h"
49 #include "tree-dfa.h"
50 #include "tree-ssa.h"
51 #include "except.h"
52 #include "cfgloop.h"
53 #include "tree-ssa-propagate.h"
54 #include "value-prof.h"
55 #include "tree-inline.h"
56 #include "tree-ssa-live.h"
57 #include "tree-ssa-dce.h"
58 #include "omp-general.h"
59 #include "omp-expand.h"
60 #include "tree-cfgcleanup.h"
61 #include "gimplify.h"
62 #include "attribs.h"
63 #include "selftest.h"
64 #include "opts.h"
65 #include "asan.h"
66 #include "profile.h"
67 #include "sreal.h"
69 /* This file contains functions for building the Control Flow Graph (CFG)
70 for a function tree. */
72 /* Local declarations. */
74 /* Initial capacity for the basic block array. */
75 static const int initial_cfg_capacity = 20;
77 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
78 which use a particular edge. The CASE_LABEL_EXPRs are chained together
79 via their CASE_CHAIN field, which we clear after we're done with the
80 hash table to prevent problems with duplication of GIMPLE_SWITCHes.
82 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
83 update the case vector in response to edge redirections.
85 Right now this table is set up and torn down at key points in the
86 compilation process. It would be nice if we could make the table
87 more persistent. The key is getting notification of changes to
88 the CFG (particularly edge removal, creation and redirection). */
90 static hash_map<edge, tree> *edge_to_cases;
92 /* If we record edge_to_cases, this bitmap will hold indexes
93 of basic blocks that end in a GIMPLE_SWITCH which we touched
94 due to edge manipulations. */
96 static bitmap touched_switch_bbs;
98 /* OpenMP region idxs for blocks during cfg pass. */
99 static vec<int> bb_to_omp_idx;
/* Counters collected while building the CFG; currently only the
   number of labels merged when blocks are combined.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;
109 /* Data to pass to replace_block_vars_by_duplicates_1. */
110 struct replace_decls_d
112 hash_map<tree, tree> *vars_map;
113 tree to_context;
/* Entry of the hash table recording the last discriminator assigned
   for each source line.  */
struct locus_discrim_map
{
  int location_line;
  int discriminator;
};
123 /* Hashtable helpers. */
125 struct locus_discrim_hasher : free_ptr_hash <locus_discrim_map>
127 static inline hashval_t hash (const locus_discrim_map *);
128 static inline bool equal (const locus_discrim_map *,
129 const locus_discrim_map *);
132 /* Trivial hash function for a location_t. ITEM is a pointer to
133 a hash table entry that maps a location_t to a discriminator. */
135 inline hashval_t
136 locus_discrim_hasher::hash (const locus_discrim_map *item)
138 return item->location_line;
141 /* Equality function for the locus-to-discriminator map. A and B
142 point to the two hash table entries to compare. */
144 inline bool
145 locus_discrim_hasher::equal (const locus_discrim_map *a,
146 const locus_discrim_map *b)
148 return a->location_line == b->location_line;
151 static hash_table<locus_discrim_hasher> *discriminator_per_locus;
153 /* Basic blocks and flowgraphs. */
154 static void make_blocks (gimple_seq);
156 /* Edges. */
157 static void make_edges (void);
158 static void assign_discriminators (void);
159 static void make_cond_expr_edges (basic_block);
160 static void make_gimple_switch_edges (gswitch *, basic_block);
161 static bool make_goto_expr_edges (basic_block);
162 static void make_gimple_asm_edges (basic_block);
163 static edge gimple_redirect_edge_and_branch (edge, basic_block);
164 static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);
166 /* Various helpers. */
167 static inline bool stmt_starts_bb_p (gimple *, gimple *);
168 static bool gimple_verify_flow_info (void);
169 static void gimple_make_forwarder_block (edge);
170 static gimple *first_non_label_stmt (basic_block);
171 static bool verify_gimple_transaction (gtransaction *);
172 static bool call_can_make_abnormal_goto (gimple *);
174 /* Flowgraph optimization and cleanup. */
175 static void gimple_merge_blocks (basic_block, basic_block);
176 static bool gimple_can_merge_blocks_p (basic_block, basic_block);
177 static void remove_bb (basic_block);
178 static edge find_taken_edge_computed_goto (basic_block, tree);
179 static edge find_taken_edge_cond_expr (const gcond *, tree);
181 void
182 init_empty_tree_cfg_for_function (struct function *fn)
184 /* Initialize the basic block array. */
185 init_flow (fn);
186 profile_status_for_fn (fn) = PROFILE_ABSENT;
187 n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
188 last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
189 vec_safe_grow_cleared (basic_block_info_for_fn (fn),
190 initial_cfg_capacity, true);
192 /* Build a mapping of labels to their associated blocks. */
193 vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
194 initial_cfg_capacity, true);
196 SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
197 SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));
199 ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
200 = EXIT_BLOCK_PTR_FOR_FN (fn);
201 EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
202 = ENTRY_BLOCK_PTR_FOR_FN (fn);
205 void
206 init_empty_tree_cfg (void)
208 init_empty_tree_cfg_for_function (cfun);
211 /*---------------------------------------------------------------------------
212 Create basic blocks
213 ---------------------------------------------------------------------------*/
215 /* Entry point to the CFG builder for trees. SEQ is the sequence of
216 statements to be added to the flowgraph. */
218 static void
219 build_gimple_cfg (gimple_seq seq)
221 /* Register specific gimple functions. */
222 gimple_register_cfg_hooks ();
224 memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));
226 init_empty_tree_cfg ();
228 make_blocks (seq);
230 /* Make sure there is always at least one block, even if it's empty. */
231 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
232 create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));
234 /* Adjust the size of the array. */
235 if (basic_block_info_for_fn (cfun)->length ()
236 < (size_t) n_basic_blocks_for_fn (cfun))
237 vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
238 n_basic_blocks_for_fn (cfun));
240 /* To speed up statement iterator walks, we first purge dead labels. */
241 cleanup_dead_labels ();
243 /* Group case nodes to reduce the number of edges.
244 We do this after cleaning up dead labels because otherwise we miss
245 a lot of obvious case merging opportunities. */
246 group_case_labels ();
248 /* Create the edges of the flowgraph. */
249 discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
250 make_edges ();
251 assign_discriminators ();
252 cleanup_dead_labels ();
253 delete discriminator_per_locus;
254 discriminator_per_locus = NULL;
257 /* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
258 them and propagate the information to LOOP. We assume that the annotations
259 come immediately before the condition in BB, if any. */
261 static void
262 replace_loop_annotate_in_block (basic_block bb, class loop *loop)
264 gimple_stmt_iterator gsi = gsi_last_bb (bb);
265 gimple *stmt = gsi_stmt (gsi);
267 if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
268 return;
270 for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
272 stmt = gsi_stmt (gsi);
273 if (gimple_code (stmt) != GIMPLE_CALL)
274 break;
275 if (!gimple_call_internal_p (stmt)
276 || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
277 break;
279 switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
281 case annot_expr_ivdep_kind:
282 loop->safelen = INT_MAX;
283 break;
284 case annot_expr_unroll_kind:
285 loop->unroll
286 = (unsigned short) tree_to_shwi (gimple_call_arg (stmt, 2));
287 cfun->has_unroll = true;
288 break;
289 case annot_expr_no_vector_kind:
290 loop->dont_vectorize = true;
291 break;
292 case annot_expr_vector_kind:
293 loop->force_vectorize = true;
294 cfun->has_force_vectorize_loops = true;
295 break;
296 case annot_expr_parallel_kind:
297 loop->can_be_parallel = true;
298 loop->safelen = INT_MAX;
299 break;
300 default:
301 gcc_unreachable ();
304 stmt = gimple_build_assign (gimple_call_lhs (stmt),
305 gimple_call_arg (stmt, 0));
306 gsi_replace (&gsi, stmt, true);
310 /* Look for ANNOTATE calls with loop annotation kind; if found, remove
311 them and propagate the information to the loop. We assume that the
312 annotations come immediately before the condition of the loop. */
314 static void
315 replace_loop_annotate (void)
317 basic_block bb;
318 gimple_stmt_iterator gsi;
319 gimple *stmt;
321 for (auto loop : loops_list (cfun, 0))
323 /* Check all exit source blocks for annotations. */
324 for (auto e : get_loop_exit_edges (loop))
325 replace_loop_annotate_in_block (e->src, loop);
327 /* Push the global flag_finite_loops state down to individual loops. */
328 loop->finite_p = flag_finite_loops;
331 /* Remove IFN_ANNOTATE. Safeguard for the case loop->latch == NULL. */
332 FOR_EACH_BB_FN (bb, cfun)
334 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
336 stmt = gsi_stmt (gsi);
337 if (gimple_code (stmt) != GIMPLE_CALL)
338 continue;
339 if (!gimple_call_internal_p (stmt)
340 || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
341 continue;
343 switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
345 case annot_expr_ivdep_kind:
346 case annot_expr_unroll_kind:
347 case annot_expr_no_vector_kind:
348 case annot_expr_vector_kind:
349 case annot_expr_parallel_kind:
350 break;
351 default:
352 gcc_unreachable ();
355 warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
356 stmt = gimple_build_assign (gimple_call_lhs (stmt),
357 gimple_call_arg (stmt, 0));
358 gsi_replace (&gsi, stmt, true);
363 static unsigned int
364 execute_build_cfg (void)
366 gimple_seq body = gimple_body (current_function_decl);
368 build_gimple_cfg (body);
369 gimple_set_body (current_function_decl, NULL);
370 if (dump_file && (dump_flags & TDF_DETAILS))
372 fprintf (dump_file, "Scope blocks:\n");
373 dump_scope_blocks (dump_file, dump_flags);
375 cleanup_tree_cfg ();
377 bb_to_omp_idx.release ();
379 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
380 replace_loop_annotate ();
381 return 0;
384 namespace {
386 const pass_data pass_data_build_cfg =
388 GIMPLE_PASS, /* type */
389 "cfg", /* name */
390 OPTGROUP_NONE, /* optinfo_flags */
391 TV_TREE_CFG, /* tv_id */
392 PROP_gimple_leh, /* properties_required */
393 ( PROP_cfg | PROP_loops ), /* properties_provided */
394 0, /* properties_destroyed */
395 0, /* todo_flags_start */
396 0, /* todo_flags_finish */
399 class pass_build_cfg : public gimple_opt_pass
401 public:
402 pass_build_cfg (gcc::context *ctxt)
403 : gimple_opt_pass (pass_data_build_cfg, ctxt)
406 /* opt_pass methods: */
407 unsigned int execute (function *) final override
409 return execute_build_cfg ();
412 }; // class pass_build_cfg
414 } // anon namespace
416 gimple_opt_pass *
417 make_pass_build_cfg (gcc::context *ctxt)
419 return new pass_build_cfg (ctxt);
423 /* Return true if T is a computed goto. */
425 bool
426 computed_goto_p (gimple *t)
428 return (gimple_code (t) == GIMPLE_GOTO
429 && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
432 /* Returns true if the sequence of statements STMTS only contains
433 a call to __builtin_unreachable (). */
435 bool
436 gimple_seq_unreachable_p (gimple_seq stmts)
438 if (stmts == NULL
439 /* Return false if -fsanitize=unreachable, we don't want to
440 optimize away those calls, but rather turn them into
441 __ubsan_handle_builtin_unreachable () or __builtin_trap ()
442 later. */
443 || sanitize_flags_p (SANITIZE_UNREACHABLE))
444 return false;
446 gimple_stmt_iterator gsi = gsi_last (stmts);
448 if (!gimple_call_builtin_p (gsi_stmt (gsi), BUILT_IN_UNREACHABLE))
449 return false;
451 for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
453 gimple *stmt = gsi_stmt (gsi);
454 if (gimple_code (stmt) != GIMPLE_LABEL
455 && !is_gimple_debug (stmt)
456 && !gimple_clobber_p (stmt))
457 return false;
459 return true;
462 /* Returns true for edge E where e->src ends with a GIMPLE_COND and
463 the other edge points to a bb with just __builtin_unreachable ().
464 I.e. return true for C->M edge in:
465 <bb C>:
467 if (something)
468 goto <bb N>;
469 else
470 goto <bb M>;
471 <bb N>:
472 __builtin_unreachable ();
473 <bb M>: */
475 bool
476 assert_unreachable_fallthru_edge_p (edge e)
478 basic_block pred_bb = e->src;
479 if (safe_is_a <gcond *> (*gsi_last_bb (pred_bb)))
481 basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
482 if (other_bb == e->dest)
483 other_bb = EDGE_SUCC (pred_bb, 1)->dest;
484 if (EDGE_COUNT (other_bb->succs) == 0)
485 return gimple_seq_unreachable_p (bb_seq (other_bb));
487 return false;
491 /* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
492 could alter control flow except via eh. We initialize the flag at
493 CFG build time and only ever clear it later. */
495 static void
496 gimple_call_initialize_ctrl_altering (gimple *stmt)
498 int flags = gimple_call_flags (stmt);
500 /* A call alters control flow if it can make an abnormal goto. */
501 if (call_can_make_abnormal_goto (stmt)
502 /* A call also alters control flow if it does not return. */
503 || flags & ECF_NORETURN
504 /* TM ending statements have backedges out of the transaction.
505 Return true so we split the basic block containing them.
506 Note that the TM_BUILTIN test is merely an optimization. */
507 || ((flags & ECF_TM_BUILTIN)
508 && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
509 /* BUILT_IN_RETURN call is same as return statement. */
510 || gimple_call_builtin_p (stmt, BUILT_IN_RETURN)
511 /* IFN_UNIQUE should be the last insn, to make checking for it
512 as cheap as possible. */
513 || (gimple_call_internal_p (stmt)
514 && gimple_call_internal_unique_p (stmt)))
515 gimple_call_set_ctrl_altering (stmt, true);
516 else
517 gimple_call_set_ctrl_altering (stmt, false);
521 /* Insert SEQ after BB and build a flowgraph. */
523 static basic_block
524 make_blocks_1 (gimple_seq seq, basic_block bb)
526 gimple_stmt_iterator i = gsi_start (seq);
527 gimple *stmt = NULL;
528 gimple *prev_stmt = NULL;
529 bool start_new_block = true;
530 bool first_stmt_of_seq = true;
532 while (!gsi_end_p (i))
534 /* PREV_STMT should only be set to a debug stmt if the debug
535 stmt is before nondebug stmts. Once stmt reaches a nondebug
536 nonlabel, prev_stmt will be set to it, so that
537 stmt_starts_bb_p will know to start a new block if a label is
538 found. However, if stmt was a label after debug stmts only,
539 keep the label in prev_stmt even if we find further debug
540 stmts, for there may be other labels after them, and they
541 should land in the same block. */
542 if (!prev_stmt || !stmt || !is_gimple_debug (stmt))
543 prev_stmt = stmt;
544 stmt = gsi_stmt (i);
546 if (stmt && is_gimple_call (stmt))
547 gimple_call_initialize_ctrl_altering (stmt);
549 /* If the statement starts a new basic block or if we have determined
550 in a previous pass that we need to create a new block for STMT, do
551 so now. */
552 if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
554 if (!first_stmt_of_seq)
555 gsi_split_seq_before (&i, &seq);
556 bb = create_basic_block (seq, bb);
557 start_new_block = false;
558 prev_stmt = NULL;
561 /* Now add STMT to BB and create the subgraphs for special statement
562 codes. */
563 gimple_set_bb (stmt, bb);
565 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
566 next iteration. */
567 if (stmt_ends_bb_p (stmt))
569 /* If the stmt can make abnormal goto use a new temporary
570 for the assignment to the LHS. This makes sure the old value
571 of the LHS is available on the abnormal edge. Otherwise
572 we will end up with overlapping life-ranges for abnormal
573 SSA names. */
574 if (gimple_has_lhs (stmt)
575 && stmt_can_make_abnormal_goto (stmt)
576 && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
578 tree lhs = gimple_get_lhs (stmt);
579 tree tmp = create_tmp_var (TREE_TYPE (lhs));
580 gimple *s = gimple_build_assign (lhs, tmp);
581 gimple_set_location (s, gimple_location (stmt));
582 gimple_set_block (s, gimple_block (stmt));
583 gimple_set_lhs (stmt, tmp);
584 gsi_insert_after (&i, s, GSI_SAME_STMT);
586 start_new_block = true;
589 gsi_next (&i);
590 first_stmt_of_seq = false;
592 return bb;
595 /* Build a flowgraph for the sequence of stmts SEQ. */
597 static void
598 make_blocks (gimple_seq seq)
600 /* Look for debug markers right before labels, and move the debug
601 stmts after the labels. Accepting labels among debug markers
602 adds no value, just complexity; if we wanted to annotate labels
603 with view numbers (so sequencing among markers would matter) or
604 somesuch, we're probably better off still moving the labels, but
605 adding other debug annotations in their original positions or
606 emitting nonbind or bind markers associated with the labels in
607 the original position of the labels.
609 Moving labels would probably be simpler, but we can't do that:
610 moving labels assigns label ids to them, and doing so because of
611 debug markers makes for -fcompare-debug and possibly even codegen
612 differences. So, we have to move the debug stmts instead. To
613 that end, we scan SEQ backwards, marking the position of the
614 latest (earliest we find) label, and moving debug stmts that are
615 not separated from it by nondebug nonlabel stmts after the
616 label. */
617 if (MAY_HAVE_DEBUG_MARKER_STMTS)
619 gimple_stmt_iterator label = gsi_none ();
621 for (gimple_stmt_iterator i = gsi_last (seq); !gsi_end_p (i); gsi_prev (&i))
623 gimple *stmt = gsi_stmt (i);
625 /* If this is the first label we encounter (latest in SEQ)
626 before nondebug stmts, record its position. */
627 if (is_a <glabel *> (stmt))
629 if (gsi_end_p (label))
630 label = i;
631 continue;
634 /* Without a recorded label position to move debug stmts to,
635 there's nothing to do. */
636 if (gsi_end_p (label))
637 continue;
639 /* Move the debug stmt at I after LABEL. */
640 if (is_gimple_debug (stmt))
642 gcc_assert (gimple_debug_nonbind_marker_p (stmt));
643 /* As STMT is removed, I advances to the stmt after
644 STMT, so the gsi_prev in the for "increment"
645 expression gets us to the stmt we're to visit after
646 STMT. LABEL, however, would advance to the moved
647 stmt if we passed it to gsi_move_after, so pass it a
648 copy instead, so as to keep LABEL pointing to the
649 LABEL. */
650 gimple_stmt_iterator copy = label;
651 gsi_move_after (&i, &copy);
652 continue;
655 /* There aren't any (more?) debug stmts before label, so
656 there isn't anything else to move after it. */
657 label = gsi_none ();
661 make_blocks_1 (seq, ENTRY_BLOCK_PTR_FOR_FN (cfun));
664 /* Create and return a new empty basic block after bb AFTER. */
666 static basic_block
667 create_bb (void *h, void *e, basic_block after)
669 basic_block bb;
671 gcc_assert (!e);
673 /* Create and initialize a new basic block. Since alloc_block uses
674 GC allocation that clears memory to allocate a basic block, we do
675 not have to clear the newly allocated basic block here. */
676 bb = alloc_block ();
678 bb->index = last_basic_block_for_fn (cfun);
679 bb->flags = BB_NEW;
680 set_bb_seq (bb, h ? (gimple_seq) h : NULL);
682 /* Add the new block to the linked list of blocks. */
683 link_block (bb, after);
685 /* Grow the basic block array if needed. */
686 if ((size_t) last_basic_block_for_fn (cfun)
687 == basic_block_info_for_fn (cfun)->length ())
688 vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
689 last_basic_block_for_fn (cfun) + 1);
691 /* Add the newly created block to the array. */
692 SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);
694 n_basic_blocks_for_fn (cfun)++;
695 last_basic_block_for_fn (cfun)++;
697 return bb;
701 /*---------------------------------------------------------------------------
702 Edge creation
703 ---------------------------------------------------------------------------*/
705 /* If basic block BB has an abnormal edge to a basic block
706 containing IFN_ABNORMAL_DISPATCHER internal call, return
707 that the dispatcher's basic block, otherwise return NULL. */
709 basic_block
710 get_abnormal_succ_dispatcher (basic_block bb)
712 edge e;
713 edge_iterator ei;
715 FOR_EACH_EDGE (e, ei, bb->succs)
716 if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
718 gimple_stmt_iterator gsi
719 = gsi_start_nondebug_after_labels_bb (e->dest);
720 gimple *g = gsi_stmt (gsi);
721 if (g && gimple_call_internal_p (g, IFN_ABNORMAL_DISPATCHER))
722 return e->dest;
724 return NULL;
727 /* Helper function for make_edges. Create a basic block with
728 with ABNORMAL_DISPATCHER internal call in it if needed, and
729 create abnormal edges from BBS to it and from it to FOR_BB
730 if COMPUTED_GOTO is false, otherwise factor the computed gotos. */
732 static void
733 handle_abnormal_edges (basic_block *dispatcher_bbs, basic_block for_bb,
734 auto_vec<basic_block> *bbs, bool computed_goto)
736 basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
737 unsigned int idx = 0;
738 basic_block bb;
739 bool inner = false;
741 if (!bb_to_omp_idx.is_empty ())
743 dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
744 if (bb_to_omp_idx[for_bb->index] != 0)
745 inner = true;
748 /* If the dispatcher has been created already, then there are basic
749 blocks with abnormal edges to it, so just make a new edge to
750 for_bb. */
751 if (*dispatcher == NULL)
753 /* Check if there are any basic blocks that need to have
754 abnormal edges to this dispatcher. If there are none, return
755 early. */
756 if (bb_to_omp_idx.is_empty ())
758 if (bbs->is_empty ())
759 return;
761 else
763 FOR_EACH_VEC_ELT (*bbs, idx, bb)
764 if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
765 break;
766 if (bb == NULL)
767 return;
770 /* Create the dispatcher bb. */
771 *dispatcher = create_basic_block (NULL, for_bb);
772 if (computed_goto)
774 /* Factor computed gotos into a common computed goto site. Also
775 record the location of that site so that we can un-factor the
776 gotos after we have converted back to normal form. */
777 gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);
779 /* Create the destination of the factored goto. Each original
780 computed goto will put its desired destination into this
781 variable and jump to the label we create immediately below. */
782 tree var = create_tmp_var (ptr_type_node, "gotovar");
784 /* Build a label for the new block which will contain the
785 factored computed goto. */
786 tree factored_label_decl
787 = create_artificial_label (UNKNOWN_LOCATION);
788 gimple *factored_computed_goto_label
789 = gimple_build_label (factored_label_decl);
790 gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);
792 /* Build our new computed goto. */
793 gimple *factored_computed_goto = gimple_build_goto (var);
794 gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);
796 FOR_EACH_VEC_ELT (*bbs, idx, bb)
798 if (!bb_to_omp_idx.is_empty ()
799 && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
800 continue;
802 gsi = gsi_last_bb (bb);
803 gimple *last = gsi_stmt (gsi);
805 gcc_assert (computed_goto_p (last));
807 /* Copy the original computed goto's destination into VAR. */
808 gimple *assignment
809 = gimple_build_assign (var, gimple_goto_dest (last));
810 gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);
812 edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
813 e->goto_locus = gimple_location (last);
814 gsi_remove (&gsi, true);
817 else
819 tree arg = inner ? boolean_true_node : boolean_false_node;
820 gcall *g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
821 1, arg);
822 gimple_call_set_ctrl_altering (g, true);
823 gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
824 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
826 /* Create predecessor edges of the dispatcher. */
827 FOR_EACH_VEC_ELT (*bbs, idx, bb)
829 if (!bb_to_omp_idx.is_empty ()
830 && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
831 continue;
832 make_edge (bb, *dispatcher, EDGE_ABNORMAL);
837 make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
840 /* Creates outgoing edges for BB. Returns 1 when it ends with an
841 computed goto, returns 2 when it ends with a statement that
842 might return to this function via an nonlocal goto, otherwise
843 return 0. Updates *PCUR_REGION with the OMP region this BB is in. */
845 static int
846 make_edges_bb (basic_block bb, struct omp_region **pcur_region, int *pomp_index)
848 gimple *last = *gsi_last_bb (bb);
849 bool fallthru = false;
850 int ret = 0;
852 if (!last)
853 return ret;
855 switch (gimple_code (last))
857 case GIMPLE_GOTO:
858 if (make_goto_expr_edges (bb))
859 ret = 1;
860 fallthru = false;
861 break;
862 case GIMPLE_RETURN:
864 edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
865 e->goto_locus = gimple_location (last);
866 fallthru = false;
868 break;
869 case GIMPLE_COND:
870 make_cond_expr_edges (bb);
871 fallthru = false;
872 break;
873 case GIMPLE_SWITCH:
874 make_gimple_switch_edges (as_a <gswitch *> (last), bb);
875 fallthru = false;
876 break;
877 case GIMPLE_RESX:
878 make_eh_edge (last);
879 fallthru = false;
880 break;
881 case GIMPLE_EH_DISPATCH:
882 fallthru = make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
883 break;
885 case GIMPLE_CALL:
886 /* If this function receives a nonlocal goto, then we need to
887 make edges from this call site to all the nonlocal goto
888 handlers. */
889 if (stmt_can_make_abnormal_goto (last))
890 ret = 2;
892 /* If this statement has reachable exception handlers, then
893 create abnormal edges to them. */
894 make_eh_edge (last);
896 /* BUILTIN_RETURN is really a return statement. */
897 if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
899 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
900 fallthru = false;
902 /* Some calls are known not to return. */
903 else
904 fallthru = !gimple_call_noreturn_p (last);
905 break;
907 case GIMPLE_ASSIGN:
908 /* A GIMPLE_ASSIGN may throw internally and thus be considered
909 control-altering. */
910 if (is_ctrl_altering_stmt (last))
911 make_eh_edge (last);
912 fallthru = true;
913 break;
915 case GIMPLE_ASM:
916 make_gimple_asm_edges (bb);
917 fallthru = true;
918 break;
920 CASE_GIMPLE_OMP:
921 fallthru = omp_make_gimple_edges (bb, pcur_region, pomp_index);
922 break;
924 case GIMPLE_TRANSACTION:
926 gtransaction *txn = as_a <gtransaction *> (last);
927 tree label1 = gimple_transaction_label_norm (txn);
928 tree label2 = gimple_transaction_label_uninst (txn);
930 if (label1)
931 make_edge (bb, label_to_block (cfun, label1), EDGE_FALLTHRU);
932 if (label2)
933 make_edge (bb, label_to_block (cfun, label2),
934 EDGE_TM_UNINSTRUMENTED | (label1 ? 0 : EDGE_FALLTHRU));
936 tree label3 = gimple_transaction_label_over (txn);
937 if (gimple_transaction_subcode (txn)
938 & (GTMA_HAVE_ABORT | GTMA_IS_OUTER))
939 make_edge (bb, label_to_block (cfun, label3), EDGE_TM_ABORT);
941 fallthru = false;
943 break;
945 default:
946 gcc_assert (!stmt_ends_bb_p (last));
947 fallthru = true;
948 break;
951 if (fallthru)
952 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
954 return ret;
957 /* Join all the blocks in the flowgraph. */
959 static void
960 make_edges (void)
962 basic_block bb;
963 struct omp_region *cur_region = NULL;
964 auto_vec<basic_block> ab_edge_goto;
965 auto_vec<basic_block> ab_edge_call;
966 int cur_omp_region_idx = 0;
968 /* Create an edge from entry to the first block with executable
969 statements in it. */
970 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
971 BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
972 EDGE_FALLTHRU);
974 /* Traverse the basic block array placing edges. */
975 FOR_EACH_BB_FN (bb, cfun)
977 int mer;
979 if (!bb_to_omp_idx.is_empty ())
980 bb_to_omp_idx[bb->index] = cur_omp_region_idx;
982 mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
983 if (mer == 1)
984 ab_edge_goto.safe_push (bb);
985 else if (mer == 2)
986 ab_edge_call.safe_push (bb);
988 if (cur_region && bb_to_omp_idx.is_empty ())
989 bb_to_omp_idx.safe_grow_cleared (n_basic_blocks_for_fn (cfun), true);
992 /* Computed gotos are hell to deal with, especially if there are
993 lots of them with a large number of destinations. So we factor
994 them to a common computed goto location before we build the
995 edge list. After we convert back to normal form, we will un-factor
996 the computed gotos since factoring introduces an unwanted jump.
997 For non-local gotos and abnormal edges from calls to calls that return
998 twice or forced labels, factor the abnormal edges too, by having all
999 abnormal edges from the calls go to a common artificial basic block
1000 with ABNORMAL_DISPATCHER internal call and abnormal edges from that
1001 basic block to all forced labels and calls returning twice.
1002 We do this per-OpenMP structured block, because those regions
1003 are guaranteed to be single entry single exit by the standard,
1004 so it is not allowed to enter or exit such regions abnormally this way,
1005 thus all computed gotos, non-local gotos and setjmp/longjmp calls
1006 must not transfer control across SESE region boundaries. */
1007 if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
1009 gimple_stmt_iterator gsi;
1010 basic_block dispatcher_bb_array[2] = { NULL, NULL };
1011 basic_block *dispatcher_bbs = dispatcher_bb_array;
1012 int count = n_basic_blocks_for_fn (cfun);
1014 if (!bb_to_omp_idx.is_empty ())
1015 dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);
1017 FOR_EACH_BB_FN (bb, cfun)
1019 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1021 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
1022 tree target;
1024 if (!label_stmt)
1025 break;
1027 target = gimple_label_label (label_stmt);
1029 /* Make an edge to every label block that has been marked as a
1030 potential target for a computed goto or a non-local goto. */
1031 if (FORCED_LABEL (target))
1032 handle_abnormal_edges (dispatcher_bbs, bb, &ab_edge_goto,
1033 true);
1034 if (DECL_NONLOCAL (target))
1036 handle_abnormal_edges (dispatcher_bbs, bb, &ab_edge_call,
1037 false);
1038 break;
1042 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
1043 gsi_next_nondebug (&gsi);
1044 if (!gsi_end_p (gsi))
1046 /* Make an edge to every setjmp-like call. */
1047 gimple *call_stmt = gsi_stmt (gsi);
1048 if (is_gimple_call (call_stmt)
1049 && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
1050 || gimple_call_builtin_p (call_stmt,
1051 BUILT_IN_SETJMP_RECEIVER)))
1052 handle_abnormal_edges (dispatcher_bbs, bb, &ab_edge_call,
1053 false);
1057 if (!bb_to_omp_idx.is_empty ())
1058 XDELETE (dispatcher_bbs);
1061 omp_free_regions ();
1064 /* Add SEQ after GSI. Start new bb after GSI, and created further bbs as
1065 needed. Returns true if new bbs were created.
1066 Note: This is transitional code, and should not be used for new code. We
1067 should be able to get rid of this by rewriting all target va-arg
1068 gimplification hooks to use an interface gimple_build_cond_value as described
1069 in https://gcc.gnu.org/ml/gcc-patches/2015-02/msg01194.html. */
1071 bool
1072 gimple_find_sub_bbs (gimple_seq seq, gimple_stmt_iterator *gsi)
1074 gimple *stmt = gsi_stmt (*gsi);
1075 basic_block bb = gimple_bb (stmt);
1076 basic_block lastbb, afterbb;
1077 int old_num_bbs = n_basic_blocks_for_fn (cfun);
1078 edge e;
1079 lastbb = make_blocks_1 (seq, bb);
1080 if (old_num_bbs == n_basic_blocks_for_fn (cfun))
1081 return false;
1082 e = split_block (bb, stmt);
1083 /* Move e->dest to come after the new basic blocks. */
1084 afterbb = e->dest;
1085 unlink_block (afterbb);
1086 link_block (afterbb, lastbb);
1087 redirect_edge_succ (e, bb->next_bb);
1088 bb = bb->next_bb;
1089 while (bb != afterbb)
1091 struct omp_region *cur_region = NULL;
1092 profile_count cnt = profile_count::zero ();
1093 bool all = true;
1095 int cur_omp_region_idx = 0;
1096 int mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
1097 gcc_assert (!mer && !cur_region);
1098 add_bb_to_loop (bb, afterbb->loop_father);
1100 edge e;
1101 edge_iterator ei;
1102 FOR_EACH_EDGE (e, ei, bb->preds)
1104 if (e->count ().initialized_p ())
1105 cnt += e->count ();
1106 else
1107 all = false;
1109 tree_guess_outgoing_edge_probabilities (bb);
1110 if (all || profile_status_for_fn (cfun) == PROFILE_READ)
1111 bb->count = cnt;
1113 bb = bb->next_bb;
1115 return true;
1118 /* Find the next available discriminator value for LOCUS. The
1119 discriminator distinguishes among several basic blocks that
1120 share a common locus, allowing for more accurate sample-based
1121 profiling. */
1123 static int
1124 next_discriminator_for_locus (int line)
1126 struct locus_discrim_map item;
1127 struct locus_discrim_map **slot;
1129 item.location_line = line;
1130 item.discriminator = 0;
1131 slot = discriminator_per_locus->find_slot_with_hash (&item, line, INSERT);
1132 gcc_assert (slot);
1133 if (*slot == HTAB_EMPTY_ENTRY)
1135 *slot = XNEW (struct locus_discrim_map);
1136 gcc_assert (*slot);
1137 (*slot)->location_line = line;
1138 (*slot)->discriminator = 0;
1140 (*slot)->discriminator++;
1141 return (*slot)->discriminator;
1144 /* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line. */
1146 static bool
1147 same_line_p (location_t locus1, expanded_location *from, location_t locus2)
1149 expanded_location to;
1151 if (locus1 == locus2)
1152 return true;
1154 to = expand_location (locus2);
1156 if (from->line != to.line)
1157 return false;
1158 if (from->file == to.file)
1159 return true;
1160 return (from->file != NULL
1161 && to.file != NULL
1162 && filename_cmp (from->file, to.file) == 0);
1165 /* Assign a unique discriminator value to all statements in block bb that
1166 have the same line number as locus. */
1168 static void
1169 assign_discriminator (location_t locus, basic_block bb)
1171 gimple_stmt_iterator gsi;
1172 int discriminator;
1174 if (locus == UNKNOWN_LOCATION)
1175 return;
1177 expanded_location locus_e = expand_location (locus);
1179 discriminator = next_discriminator_for_locus (locus_e.line);
1181 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1183 gimple *stmt = gsi_stmt (gsi);
1184 location_t stmt_locus = gimple_location (stmt);
1185 if (same_line_p (locus, &locus_e, stmt_locus))
1186 gimple_set_location (stmt,
1187 location_with_discriminator (stmt_locus, discriminator));
1191 /* Assign discriminators to statement locations. */
1193 static void
1194 assign_discriminators (void)
1196 basic_block bb;
1198 FOR_EACH_BB_FN (bb, cfun)
1200 edge e;
1201 edge_iterator ei;
1202 gimple_stmt_iterator gsi;
1203 location_t curr_locus = UNKNOWN_LOCATION;
1204 expanded_location curr_locus_e = {};
1205 int curr_discr = 0;
1207 /* Traverse the basic block, if two function calls within a basic block
1208 are mapped to the same line, assign a new discriminator because a call
1209 stmt could be a split point of a basic block. */
1210 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1212 gimple *stmt = gsi_stmt (gsi);
1214 /* Don't allow debug stmts to affect discriminators, but
1215 allow them to take discriminators when they're on the
1216 same line as the preceding nondebug stmt. */
1217 if (is_gimple_debug (stmt))
1219 if (curr_locus != UNKNOWN_LOCATION
1220 && same_line_p (curr_locus, &curr_locus_e,
1221 gimple_location (stmt)))
1223 location_t loc = gimple_location (stmt);
1224 location_t dloc = location_with_discriminator (loc,
1225 curr_discr);
1226 gimple_set_location (stmt, dloc);
1228 continue;
1230 if (curr_locus == UNKNOWN_LOCATION)
1232 curr_locus = gimple_location (stmt);
1233 curr_locus_e = expand_location (curr_locus);
1235 else if (!same_line_p (curr_locus, &curr_locus_e, gimple_location (stmt)))
1237 curr_locus = gimple_location (stmt);
1238 curr_locus_e = expand_location (curr_locus);
1239 curr_discr = 0;
1241 else if (curr_discr != 0)
1243 location_t loc = gimple_location (stmt);
1244 location_t dloc = location_with_discriminator (loc, curr_discr);
1245 gimple_set_location (stmt, dloc);
1247 /* Allocate a new discriminator for CALL stmt. */
1248 if (gimple_code (stmt) == GIMPLE_CALL)
1249 curr_discr = next_discriminator_for_locus (curr_locus);
1252 gimple *last = last_nondebug_stmt (bb);
1253 location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;
1254 if (locus == UNKNOWN_LOCATION)
1255 continue;
1257 expanded_location locus_e = expand_location (locus);
1259 FOR_EACH_EDGE (e, ei, bb->succs)
1261 gimple *first = first_non_label_stmt (e->dest);
1262 gimple *last = last_nondebug_stmt (e->dest);
1264 gimple *stmt_on_same_line = NULL;
1265 if (first && same_line_p (locus, &locus_e,
1266 gimple_location (first)))
1267 stmt_on_same_line = first;
1268 else if (last && same_line_p (locus, &locus_e,
1269 gimple_location (last)))
1270 stmt_on_same_line = last;
1272 if (stmt_on_same_line)
1274 if (has_discriminator (gimple_location (stmt_on_same_line))
1275 && !has_discriminator (locus))
1276 assign_discriminator (locus, bb);
1277 else
1278 assign_discriminator (locus, e->dest);
1284 /* Create the edges for a GIMPLE_COND starting at block BB. */
1286 static void
1287 make_cond_expr_edges (basic_block bb)
1289 gcond *entry = as_a <gcond *> (*gsi_last_bb (bb));
1290 gimple *then_stmt, *else_stmt;
1291 basic_block then_bb, else_bb;
1292 tree then_label, else_label;
1293 edge e;
1295 gcc_assert (entry);
1297 /* Entry basic blocks for each component. */
1298 then_label = gimple_cond_true_label (entry);
1299 else_label = gimple_cond_false_label (entry);
1300 then_bb = label_to_block (cfun, then_label);
1301 else_bb = label_to_block (cfun, else_label);
1302 then_stmt = first_stmt (then_bb);
1303 else_stmt = first_stmt (else_bb);
1305 e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
1306 e->goto_locus = gimple_location (then_stmt);
1307 e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
1308 if (e)
1309 e->goto_locus = gimple_location (else_stmt);
1311 /* We do not need the labels anymore. */
1312 gimple_cond_set_true_label (entry, NULL_TREE);
1313 gimple_cond_set_false_label (entry, NULL_TREE);
1317 /* Called for each element in the hash table (P) as we delete the
1318 edge to cases hash table.
1320 Clear all the CASE_CHAINs to prevent problems with copying of
1321 SWITCH_EXPRs and structure sharing rules, then free the hash table
1322 element. */
1324 bool
1325 edge_to_cases_cleanup (edge const &, tree const &value, void *)
1327 tree t, next;
1329 for (t = value; t; t = next)
1331 next = CASE_CHAIN (t);
1332 CASE_CHAIN (t) = NULL;
1335 return true;
1338 /* Start recording information mapping edges to case labels. */
1340 void
1341 start_recording_case_labels (void)
1343 gcc_assert (edge_to_cases == NULL);
1344 edge_to_cases = new hash_map<edge, tree>;
1345 touched_switch_bbs = BITMAP_ALLOC (NULL);
1348 /* Return nonzero if we are recording information for case labels. */
1350 static bool
1351 recording_case_labels_p (void)
1353 return (edge_to_cases != NULL);
1356 /* Stop recording information mapping edges to case labels and
1357 remove any information we have recorded. */
1358 void
1359 end_recording_case_labels (void)
1361 bitmap_iterator bi;
1362 unsigned i;
1363 edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
1364 delete edge_to_cases;
1365 edge_to_cases = NULL;
1366 EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
1368 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
1369 if (bb)
1371 if (gswitch *stmt = safe_dyn_cast <gswitch *> (*gsi_last_bb (bb)))
1372 group_case_labels_stmt (stmt);
1375 BITMAP_FREE (touched_switch_bbs);
1378 /* If we are inside a {start,end}_recording_cases block, then return
1379 a chain of CASE_LABEL_EXPRs from T which reference E.
1381 Otherwise return NULL. */
1383 tree
1384 get_cases_for_edge (edge e, gswitch *t)
1386 tree *slot;
1387 size_t i, n;
1389 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
1390 chains available. Return NULL so the caller can detect this case. */
1391 if (!recording_case_labels_p ())
1392 return NULL;
1394 slot = edge_to_cases->get (e);
1395 if (slot)
1396 return *slot;
1398 /* If we did not find E in the hash table, then this must be the first
1399 time we have been queried for information about E & T. Add all the
1400 elements from T to the hash table then perform the query again. */
1402 n = gimple_switch_num_labels (t);
1403 for (i = 0; i < n; i++)
1405 tree elt = gimple_switch_label (t, i);
1406 tree lab = CASE_LABEL (elt);
1407 basic_block label_bb = label_to_block (cfun, lab);
1408 edge this_edge = find_edge (e->src, label_bb);
1410 /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
1411 a new chain. */
1412 tree &s = edge_to_cases->get_or_insert (this_edge);
1413 CASE_CHAIN (elt) = s;
1414 s = elt;
1417 return *edge_to_cases->get (e);
1420 /* Create the edges for a GIMPLE_SWITCH starting at block BB. */
1422 static void
1423 make_gimple_switch_edges (gswitch *entry, basic_block bb)
1425 size_t i, n;
1427 n = gimple_switch_num_labels (entry);
1429 for (i = 0; i < n; ++i)
1431 basic_block label_bb = gimple_switch_label_bb (cfun, entry, i);
1432 make_edge (bb, label_bb, 0);
1437 /* Return the basic block holding label DEST. */
1439 basic_block
1440 label_to_block (struct function *ifun, tree dest)
1442 int uid = LABEL_DECL_UID (dest);
1444 /* We would die hard when faced by an undefined label. Emit a label to
1445 the very first basic block. This will hopefully make even the dataflow
1446 and undefined variable warnings quite right. */
1447 if (seen_error () && uid < 0)
1449 gimple_stmt_iterator gsi =
1450 gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
1451 gimple *stmt;
1453 stmt = gimple_build_label (dest);
1454 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
1455 uid = LABEL_DECL_UID (dest);
1457 if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
1458 return NULL;
1459 return (*ifun->cfg->x_label_to_block_map)[uid];
1462 /* Create edges for a goto statement at block BB. Returns true
1463 if abnormal edges should be created. */
1465 static bool
1466 make_goto_expr_edges (basic_block bb)
1468 gimple_stmt_iterator last = gsi_last_bb (bb);
1469 gimple *goto_t = gsi_stmt (last);
1471 /* A simple GOTO creates normal edges. */
1472 if (simple_goto_p (goto_t))
1474 tree dest = gimple_goto_dest (goto_t);
1475 basic_block label_bb = label_to_block (cfun, dest);
1476 edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
1477 e->goto_locus = gimple_location (goto_t);
1478 gsi_remove (&last, true);
1479 return false;
1482 /* A computed GOTO creates abnormal edges. */
1483 return true;
1486 /* Create edges for an asm statement with labels at block BB. */
1488 static void
1489 make_gimple_asm_edges (basic_block bb)
1491 gasm *stmt = as_a <gasm *> (*gsi_last_bb (bb));
1492 int i, n = gimple_asm_nlabels (stmt);
1494 for (i = 0; i < n; ++i)
1496 tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
1497 basic_block label_bb = label_to_block (cfun, label);
1498 make_edge (bb, label_bb, 0);
1502 /*---------------------------------------------------------------------------
1503 Flowgraph analysis
1504 ---------------------------------------------------------------------------*/
1506 /* Cleanup useless labels in basic blocks. This is something we wish
1507 to do early because it allows us to group case labels before creating
1508 the edges for the CFG, and it speeds up block statement iterators in
1509 all passes later on.
1510 We rerun this pass after CFG is created, to get rid of the labels that
1511 are no longer referenced. After then we do not run it any more, since
1512 (almost) no new labels should be created. */
1514 /* A map from basic block index to the leading label of that block. */
1515 struct label_record
1517 /* The label. */
1518 tree label;
1520 /* True if the label is referenced from somewhere. */
1521 bool used;
1524 /* Given LABEL return the first label in the same basic block. */
1526 static tree
1527 main_block_label (tree label, label_record *label_for_bb)
1529 basic_block bb = label_to_block (cfun, label);
1530 tree main_label = label_for_bb[bb->index].label;
1532 /* label_to_block possibly inserted undefined label into the chain. */
1533 if (!main_label)
1535 label_for_bb[bb->index].label = label;
1536 main_label = label;
1539 label_for_bb[bb->index].used = true;
1540 return main_label;
1543 /* Clean up redundant labels within the exception tree. */
1545 static void
1546 cleanup_dead_labels_eh (label_record *label_for_bb)
1548 eh_landing_pad lp;
1549 eh_region r;
1550 tree lab;
1551 int i;
1553 if (cfun->eh == NULL)
1554 return;
1556 for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
1557 if (lp && lp->post_landing_pad)
1559 lab = main_block_label (lp->post_landing_pad, label_for_bb);
1560 if (lab != lp->post_landing_pad)
1562 EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
1563 lp->post_landing_pad = lab;
1564 EH_LANDING_PAD_NR (lab) = lp->index;
1568 FOR_ALL_EH_REGION (r)
1569 switch (r->type)
1571 case ERT_CLEANUP:
1572 case ERT_MUST_NOT_THROW:
1573 break;
1575 case ERT_TRY:
1577 eh_catch c;
1578 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
1580 lab = c->label;
1581 if (lab)
1582 c->label = main_block_label (lab, label_for_bb);
1585 break;
1587 case ERT_ALLOWED_EXCEPTIONS:
1588 lab = r->u.allowed.label;
1589 if (lab)
1590 r->u.allowed.label = main_block_label (lab, label_for_bb);
1591 break;
1596 /* Cleanup redundant labels. This is a three-step process:
1597 1) Find the leading label for each block.
1598 2) Redirect all references to labels to the leading labels.
1599 3) Cleanup all useless labels. */
1601 void
1602 cleanup_dead_labels (void)
1604 basic_block bb;
1605 label_record *label_for_bb = XCNEWVEC (struct label_record,
1606 last_basic_block_for_fn (cfun));
1608 /* Find a suitable label for each block. We use the first user-defined
1609 label if there is one, or otherwise just the first label we see. */
1610 FOR_EACH_BB_FN (bb, cfun)
1612 gimple_stmt_iterator i;
1614 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
1616 tree label;
1617 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));
1619 if (!label_stmt)
1620 break;
1622 label = gimple_label_label (label_stmt);
1624 /* If we have not yet seen a label for the current block,
1625 remember this one and see if there are more labels. */
1626 if (!label_for_bb[bb->index].label)
1628 label_for_bb[bb->index].label = label;
1629 continue;
1632 /* If we did see a label for the current block already, but it
1633 is an artificially created label, replace it if the current
1634 label is a user defined label. */
1635 if (!DECL_ARTIFICIAL (label)
1636 && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
1638 label_for_bb[bb->index].label = label;
1639 break;
1644 /* Now redirect all jumps/branches to the selected label.
1645 First do so for each block ending in a control statement. */
1646 FOR_EACH_BB_FN (bb, cfun)
1648 gimple *stmt = *gsi_last_bb (bb);
1649 tree label, new_label;
1651 if (!stmt)
1652 continue;
1654 switch (gimple_code (stmt))
1656 case GIMPLE_COND:
1658 gcond *cond_stmt = as_a <gcond *> (stmt);
1659 label = gimple_cond_true_label (cond_stmt);
1660 if (label)
1662 new_label = main_block_label (label, label_for_bb);
1663 if (new_label != label)
1664 gimple_cond_set_true_label (cond_stmt, new_label);
1667 label = gimple_cond_false_label (cond_stmt);
1668 if (label)
1670 new_label = main_block_label (label, label_for_bb);
1671 if (new_label != label)
1672 gimple_cond_set_false_label (cond_stmt, new_label);
1675 break;
1677 case GIMPLE_SWITCH:
1679 gswitch *switch_stmt = as_a <gswitch *> (stmt);
1680 size_t i, n = gimple_switch_num_labels (switch_stmt);
1682 /* Replace all destination labels. */
1683 for (i = 0; i < n; ++i)
1685 tree case_label = gimple_switch_label (switch_stmt, i);
1686 label = CASE_LABEL (case_label);
1687 new_label = main_block_label (label, label_for_bb);
1688 if (new_label != label)
1689 CASE_LABEL (case_label) = new_label;
1691 break;
1694 case GIMPLE_ASM:
1696 gasm *asm_stmt = as_a <gasm *> (stmt);
1697 int i, n = gimple_asm_nlabels (asm_stmt);
1699 for (i = 0; i < n; ++i)
1701 tree cons = gimple_asm_label_op (asm_stmt, i);
1702 tree label = main_block_label (TREE_VALUE (cons), label_for_bb);
1703 TREE_VALUE (cons) = label;
1705 break;
1708 /* We have to handle gotos until they're removed, and we don't
1709 remove them until after we've created the CFG edges. */
1710 case GIMPLE_GOTO:
1711 if (!computed_goto_p (stmt))
1713 ggoto *goto_stmt = as_a <ggoto *> (stmt);
1714 label = gimple_goto_dest (goto_stmt);
1715 new_label = main_block_label (label, label_for_bb);
1716 if (new_label != label)
1717 gimple_goto_set_dest (goto_stmt, new_label);
1719 break;
1721 case GIMPLE_TRANSACTION:
1723 gtransaction *txn = as_a <gtransaction *> (stmt);
1725 label = gimple_transaction_label_norm (txn);
1726 if (label)
1728 new_label = main_block_label (label, label_for_bb);
1729 if (new_label != label)
1730 gimple_transaction_set_label_norm (txn, new_label);
1733 label = gimple_transaction_label_uninst (txn);
1734 if (label)
1736 new_label = main_block_label (label, label_for_bb);
1737 if (new_label != label)
1738 gimple_transaction_set_label_uninst (txn, new_label);
1741 label = gimple_transaction_label_over (txn);
1742 if (label)
1744 new_label = main_block_label (label, label_for_bb);
1745 if (new_label != label)
1746 gimple_transaction_set_label_over (txn, new_label);
1749 break;
1751 default:
1752 break;
1756 /* Do the same for the exception region tree labels. */
1757 cleanup_dead_labels_eh (label_for_bb);
1759 /* Finally, purge dead labels. All user-defined labels and labels that
1760 can be the target of non-local gotos and labels which have their
1761 address taken are preserved. */
1762 FOR_EACH_BB_FN (bb, cfun)
1764 gimple_stmt_iterator i;
1765 tree label_for_this_bb = label_for_bb[bb->index].label;
1767 if (!label_for_this_bb)
1768 continue;
1770 /* If the main label of the block is unused, we may still remove it. */
1771 if (!label_for_bb[bb->index].used)
1772 label_for_this_bb = NULL;
1774 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
1776 tree label;
1777 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));
1779 if (!label_stmt)
1780 break;
1782 label = gimple_label_label (label_stmt);
1784 if (label == label_for_this_bb
1785 || !DECL_ARTIFICIAL (label)
1786 || DECL_NONLOCAL (label)
1787 || FORCED_LABEL (label))
1788 gsi_next (&i);
1789 else
1791 gcc_checking_assert (EH_LANDING_PAD_NR (label) == 0);
1792 gsi_remove (&i, true);
1797 free (label_for_bb);
1800 /* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
1801 the ones jumping to the same label.
1802 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
1804 bool
1805 group_case_labels_stmt (gswitch *stmt)
1807 int old_size = gimple_switch_num_labels (stmt);
1808 int i, next_index, new_size;
1809 basic_block default_bb = NULL;
1810 hash_set<tree> *removed_labels = NULL;
1812 default_bb = gimple_switch_default_bb (cfun, stmt);
1814 /* Look for possible opportunities to merge cases. */
1815 new_size = i = 1;
1816 while (i < old_size)
1818 tree base_case, base_high;
1819 basic_block base_bb;
1821 base_case = gimple_switch_label (stmt, i);
1823 gcc_assert (base_case);
1824 base_bb = label_to_block (cfun, CASE_LABEL (base_case));
1826 /* Discard cases that have the same destination as the default case or
1827 whose destination blocks have already been removed as unreachable. */
1828 if (base_bb == NULL
1829 || base_bb == default_bb
1830 || (removed_labels
1831 && removed_labels->contains (CASE_LABEL (base_case))))
1833 i++;
1834 continue;
1837 base_high = CASE_HIGH (base_case)
1838 ? CASE_HIGH (base_case)
1839 : CASE_LOW (base_case);
1840 next_index = i + 1;
1842 /* Try to merge case labels. Break out when we reach the end
1843 of the label vector or when we cannot merge the next case
1844 label with the current one. */
1845 while (next_index < old_size)
1847 tree merge_case = gimple_switch_label (stmt, next_index);
1848 basic_block merge_bb = label_to_block (cfun, CASE_LABEL (merge_case));
1849 wide_int bhp1 = wi::to_wide (base_high) + 1;
1851 /* Merge the cases if they jump to the same place,
1852 and their ranges are consecutive. */
1853 if (merge_bb == base_bb
1854 && (removed_labels == NULL
1855 || !removed_labels->contains (CASE_LABEL (merge_case)))
1856 && wi::to_wide (CASE_LOW (merge_case)) == bhp1)
1858 base_high
1859 = (CASE_HIGH (merge_case)
1860 ? CASE_HIGH (merge_case) : CASE_LOW (merge_case));
1861 CASE_HIGH (base_case) = base_high;
1862 next_index++;
1864 else
1865 break;
1868 /* Discard cases that have an unreachable destination block. */
1869 if (EDGE_COUNT (base_bb->succs) == 0
1870 && gimple_seq_unreachable_p (bb_seq (base_bb))
1871 /* Don't optimize this if __builtin_unreachable () is the
1872 implicitly added one by the C++ FE too early, before
1873 -Wreturn-type can be diagnosed. We'll optimize it later
1874 during switchconv pass or any other cfg cleanup. */
1875 && (gimple_in_ssa_p (cfun)
1876 || (LOCATION_LOCUS (gimple_location (last_nondebug_stmt (base_bb)))
1877 != BUILTINS_LOCATION)))
1879 edge base_edge = find_edge (gimple_bb (stmt), base_bb);
1880 if (base_edge != NULL)
1882 for (gimple_stmt_iterator gsi = gsi_start_bb (base_bb);
1883 !gsi_end_p (gsi); gsi_next (&gsi))
1884 if (glabel *stmt = dyn_cast <glabel *> (gsi_stmt (gsi)))
1886 if (FORCED_LABEL (gimple_label_label (stmt))
1887 || DECL_NONLOCAL (gimple_label_label (stmt)))
1889 /* Forced/non-local labels aren't going to be removed,
1890 but they will be moved to some neighbouring basic
1891 block. If some later case label refers to one of
1892 those labels, we should throw that case away rather
1893 than keeping it around and refering to some random
1894 other basic block without an edge to it. */
1895 if (removed_labels == NULL)
1896 removed_labels = new hash_set<tree>;
1897 removed_labels->add (gimple_label_label (stmt));
1900 else
1901 break;
1902 remove_edge_and_dominated_blocks (base_edge);
1904 i = next_index;
1905 continue;
1908 if (new_size < i)
1909 gimple_switch_set_label (stmt, new_size,
1910 gimple_switch_label (stmt, i));
1911 i = next_index;
1912 new_size++;
1915 gcc_assert (new_size <= old_size);
1917 if (new_size < old_size)
1918 gimple_switch_set_num_labels (stmt, new_size);
1920 delete removed_labels;
1921 return new_size < old_size;
1924 /* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1925 and scan the sorted vector of cases. Combine the ones jumping to the
1926 same label. */
1928 bool
1929 group_case_labels (void)
1931 basic_block bb;
1932 bool changed = false;
1934 FOR_EACH_BB_FN (bb, cfun)
1936 if (gswitch *stmt = safe_dyn_cast <gswitch *> (*gsi_last_bb (bb)))
1937 changed |= group_case_labels_stmt (stmt);
1940 return changed;
1943 /* Checks whether we can merge block B into block A. */
1945 static bool
1946 gimple_can_merge_blocks_p (basic_block a, basic_block b)
1948 gimple *stmt;
1950 if (!single_succ_p (a))
1951 return false;
1953 if (single_succ_edge (a)->flags & EDGE_COMPLEX)
1954 return false;
1956 if (single_succ (a) != b)
1957 return false;
1959 if (!single_pred_p (b))
1960 return false;
1962 if (a == ENTRY_BLOCK_PTR_FOR_FN (cfun)
1963 || b == EXIT_BLOCK_PTR_FOR_FN (cfun))
1964 return false;
1966 /* If A ends by a statement causing exceptions or something similar, we
1967 cannot merge the blocks. */
1968 stmt = *gsi_last_bb (a);
1969 if (stmt && stmt_ends_bb_p (stmt))
1970 return false;
1972 /* Examine the labels at the beginning of B. */
1973 for (gimple_stmt_iterator gsi = gsi_start_bb (b); !gsi_end_p (gsi);
1974 gsi_next (&gsi))
1976 tree lab;
1977 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
1978 if (!label_stmt)
1979 break;
1980 lab = gimple_label_label (label_stmt);
1982 /* Do not remove user forced labels or for -O0 any user labels. */
1983 if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
1984 return false;
1987 /* Protect simple loop latches. We only want to avoid merging
1988 the latch with the loop header or with a block in another
1989 loop in this case. */
1990 if (current_loops
1991 && b->loop_father->latch == b
1992 && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)
1993 && (b->loop_father->header == a
1994 || b->loop_father != a->loop_father))
1995 return false;
1997 /* It must be possible to eliminate all phi nodes in B. If ssa form
1998 is not up-to-date and a name-mapping is registered, we cannot eliminate
1999 any phis. Symbols marked for renaming are never a problem though. */
2000 for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);
2001 gsi_next (&gsi))
2003 gphi *phi = gsi.phi ();
2004 /* Technically only new names matter. */
2005 if (name_registered_for_update_p (PHI_RESULT (phi)))
2006 return false;
2009 /* When not optimizing, don't merge if we'd lose goto_locus. */
2010 if (!optimize
2011 && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
2013 location_t goto_locus = single_succ_edge (a)->goto_locus;
2014 gimple_stmt_iterator prev, next;
2015 prev = gsi_last_nondebug_bb (a);
2016 next = gsi_after_labels (b);
2017 if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
2018 gsi_next_nondebug (&next);
2019 if ((gsi_end_p (prev)
2020 || gimple_location (gsi_stmt (prev)) != goto_locus)
2021 && (gsi_end_p (next)
2022 || gimple_location (gsi_stmt (next)) != goto_locus))
2023 return false;
2026 return true;
2029 /* Replaces all uses of NAME by VAL. */
2031 void
2032 replace_uses_by (tree name, tree val)
2034 imm_use_iterator imm_iter;
2035 use_operand_p use;
2036 gimple *stmt;
2037 edge e;
2039 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
2041 /* Mark the block if we change the last stmt in it. */
2042 if (cfgcleanup_altered_bbs
2043 && stmt_ends_bb_p (stmt))
2044 bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);
2046 FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
2048 replace_exp (use, val);
2050 if (gimple_code (stmt) == GIMPLE_PHI)
2052 e = gimple_phi_arg_edge (as_a <gphi *> (stmt),
2053 PHI_ARG_INDEX_FROM_USE (use));
2054 if (e->flags & EDGE_ABNORMAL
2055 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
2057 /* This can only occur for virtual operands, since
2058 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
2059 would prevent replacement. */
2060 gcc_checking_assert (virtual_operand_p (name));
2061 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
2066 if (gimple_code (stmt) != GIMPLE_PHI)
2068 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
2069 gimple *orig_stmt = stmt;
2070 size_t i;
2072 /* FIXME. It shouldn't be required to keep TREE_CONSTANT
2073 on ADDR_EXPRs up-to-date on GIMPLE. Propagation will
2074 only change sth from non-invariant to invariant, and only
2075 when propagating constants. */
2076 if (is_gimple_min_invariant (val))
2077 for (i = 0; i < gimple_num_ops (stmt); i++)
2079 tree op = gimple_op (stmt, i);
2080 /* Operands may be empty here. For example, the labels
2081 of a GIMPLE_COND are nulled out following the creation
2082 of the corresponding CFG edges. */
2083 if (op && TREE_CODE (op) == ADDR_EXPR)
2084 recompute_tree_invariant_for_addr_expr (op);
2087 if (fold_stmt (&gsi))
2088 stmt = gsi_stmt (gsi);
2090 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
2091 gimple_purge_dead_eh_edges (gimple_bb (stmt));
2093 update_stmt (stmt);
2097 gcc_checking_assert (has_zero_uses (name));
2099 /* Also update the trees stored in loop structures. */
2100 if (current_loops)
2102 for (auto loop : loops_list (cfun, 0))
2103 substitute_in_loop_info (loop, name, val);
2107 /* Merge block B into block A. */
2109 static void
2110 gimple_merge_blocks (basic_block a, basic_block b)
2112 gimple_stmt_iterator last, gsi;
2113 gphi_iterator psi;
2115 if (dump_file)
2116 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
2118 /* Remove all single-valued PHI nodes from block B of the form
2119 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
2120 gsi = gsi_last_bb (a);
2121 for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
2123 gimple *phi = gsi_stmt (psi);
2124 tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
2125 gimple *copy;
2126 bool may_replace_uses = (virtual_operand_p (def)
2127 || may_propagate_copy (def, use));
2129 /* In case we maintain loop closed ssa form, do not propagate arguments
2130 of loop exit phi nodes. */
2131 if (current_loops
2132 && loops_state_satisfies_p (LOOP_CLOSED_SSA)
2133 && !virtual_operand_p (def)
2134 && TREE_CODE (use) == SSA_NAME
2135 && a->loop_father != b->loop_father)
2136 may_replace_uses = false;
2138 if (!may_replace_uses)
2140 gcc_assert (!virtual_operand_p (def));
2142 /* Note that just emitting the copies is fine -- there is no problem
2143 with ordering of phi nodes. This is because A is the single
2144 predecessor of B, therefore results of the phi nodes cannot
2145 appear as arguments of the phi nodes. */
2146 copy = gimple_build_assign (def, use);
2147 gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
2148 remove_phi_node (&psi, false);
2150 else
2152 /* If we deal with a PHI for virtual operands, we can simply
2153 propagate these without fussing with folding or updating
2154 the stmt. */
2155 if (virtual_operand_p (def))
2157 imm_use_iterator iter;
2158 use_operand_p use_p;
2159 gimple *stmt;
2161 FOR_EACH_IMM_USE_STMT (stmt, iter, def)
2162 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
2163 SET_USE (use_p, use);
2165 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
2166 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
2168 else
2169 replace_uses_by (def, use);
2171 remove_phi_node (&psi, true);
2175 /* Ensure that B follows A. */
2176 move_block_after (b, a);
2178 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
2179 gcc_assert (!*gsi_last_bb (a)
2180 || !stmt_ends_bb_p (*gsi_last_bb (a)));
2182 /* Remove labels from B and set gimple_bb to A for other statements. */
2183 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
2185 gimple *stmt = gsi_stmt (gsi);
2186 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2188 tree label = gimple_label_label (label_stmt);
2189 int lp_nr;
2191 gsi_remove (&gsi, false);
2193 /* Now that we can thread computed gotos, we might have
2194 a situation where we have a forced label in block B
2195 However, the label at the start of block B might still be
2196 used in other ways (think about the runtime checking for
2197 Fortran assigned gotos). So we cannot just delete the
2198 label. Instead we move the label to the start of block A. */
2199 if (FORCED_LABEL (label))
2201 gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
2202 tree first_label = NULL_TREE;
2203 if (!gsi_end_p (dest_gsi))
2204 if (glabel *first_label_stmt
2205 = dyn_cast <glabel *> (gsi_stmt (dest_gsi)))
2206 first_label = gimple_label_label (first_label_stmt);
2207 if (first_label
2208 && (DECL_NONLOCAL (first_label)
2209 || EH_LANDING_PAD_NR (first_label) != 0))
2210 gsi_insert_after (&dest_gsi, stmt, GSI_NEW_STMT);
2211 else
2212 gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
2214 /* Other user labels keep around in a form of a debug stmt. */
2215 else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_BIND_STMTS)
2217 gimple *dbg = gimple_build_debug_bind (label,
2218 integer_zero_node,
2219 stmt);
2220 gimple_debug_bind_reset_value (dbg);
2221 gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
2224 lp_nr = EH_LANDING_PAD_NR (label);
2225 if (lp_nr)
2227 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
2228 lp->post_landing_pad = NULL;
2231 else
2233 gimple_set_bb (stmt, a);
2234 gsi_next (&gsi);
2238 /* When merging two BBs, if their counts are different, the larger count
2239 is selected as the new bb count. This is to handle inconsistent
2240 profiles. */
2241 if (a->loop_father == b->loop_father)
2243 a->count = a->count.merge (b->count);
2246 /* Merge the sequences. */
2247 last = gsi_last_bb (a);
2248 gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
2249 set_bb_seq (b, NULL);
2251 if (cfgcleanup_altered_bbs)
2252 bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
2256 /* Return the one of two successors of BB that is not reachable by a
2257 complex edge, if there is one. Else, return BB. We use
2258 this in optimizations that use post-dominators for their heuristics,
2259 to catch the cases in C++ where function calls are involved. */
2261 basic_block
2262 single_noncomplex_succ (basic_block bb)
2264 edge e0, e1;
2265 if (EDGE_COUNT (bb->succs) != 2)
2266 return bb;
2268 e0 = EDGE_SUCC (bb, 0);
2269 e1 = EDGE_SUCC (bb, 1);
2270 if (e0->flags & EDGE_COMPLEX)
2271 return e1->dest;
2272 if (e1->flags & EDGE_COMPLEX)
2273 return e0->dest;
2275 return bb;
2278 /* T is CALL_EXPR. Set current_function_calls_* flags. */
2280 void
2281 notice_special_calls (gcall *call)
2283 int flags = gimple_call_flags (call);
2285 if (flags & ECF_MAY_BE_ALLOCA)
2286 cfun->calls_alloca = true;
2287 if (flags & ECF_RETURNS_TWICE)
2288 cfun->calls_setjmp = true;
2292 /* Clear flags set by notice_special_calls. Used by dead code removal
2293 to update the flags. */
2295 void
2296 clear_special_calls (void)
2298 cfun->calls_alloca = false;
2299 cfun->calls_setjmp = false;
2302 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2304 static void
2305 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2307 /* Since this block is no longer reachable, we can just delete all
2308 of its PHI nodes. */
2309 remove_phi_nodes (bb);
2311 /* Remove edges to BB's successors. */
2312 while (EDGE_COUNT (bb->succs) > 0)
2313 remove_edge (EDGE_SUCC (bb, 0));
/* Remove statements of basic block BB.  Forced/nonlocal labels are kept
   (relocated to another block) since they may still be referenced; all
   other statements are deleted and their SSA defs released.  */

static void
remove_bb (basic_block bb)
{
  gimple_stmt_iterator i;

  if (dump_file)
    {
      fprintf (dump_file, "Removing basic block %d\n", bb->index);
      if (dump_flags & TDF_DETAILS)
	{
	  dump_bb (dump_file, bb, 0, TDF_BLOCKS);
	  fprintf (dump_file, "\n");
	}
    }

  if (current_loops)
    {
      class loop *loop = bb->loop_father;

      /* If a loop gets removed, clean up the information associated
	 with it.  */
      if (loop->latch == bb
	  || loop->header == bb)
	free_numbers_of_iterations_estimates (loop);
    }

  /* Remove all the instructions in the block.  */
  if (bb_seq (bb) != NULL)
    {
      /* Walk backwards so as to get a chance to substitute all
	 released DEFs into debug stmts.  See
	 eliminate_unnecessary_stmts() in tree-ssa-dce.cc for more
	 details.  */
      for (i = gsi_last_bb (bb); !gsi_end_p (i);)
	{
	  gimple *stmt = gsi_stmt (i);
	  glabel *label_stmt = dyn_cast <glabel *> (stmt);
	  if (label_stmt
	      && (FORCED_LABEL (gimple_label_label (label_stmt))
		  || DECL_NONLOCAL (gimple_label_label (label_stmt))))
	    {
	      basic_block new_bb;
	      gimple_stmt_iterator new_gsi;

	      /* A non-reachable non-local label may still be referenced.
		 But it no longer needs to carry the extra semantics of
		 non-locality.  */
	      if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
		{
		  DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
		  FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
		}

	      /* By default relocate the label to the previous block.  */
	      new_bb = bb->prev_bb;
	      /* Don't move any labels into ENTRY block.  */
	      if (new_bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
		{
		  new_bb = single_succ (new_bb);
		  gcc_assert (new_bb != bb);
		}
	      if ((unsigned) bb->index < bb_to_omp_idx.length ()
		  && ((unsigned) new_bb->index >= bb_to_omp_idx.length ()
		      || (bb_to_omp_idx[bb->index]
			  != bb_to_omp_idx[new_bb->index])))
		{
		  /* During cfg pass make sure to put orphaned labels
		     into the right OMP region.  */
		  unsigned int i;
		  int idx;
		  new_bb = NULL;
		  /* Find some other block in the same OMP region.  */
		  FOR_EACH_VEC_ELT (bb_to_omp_idx, i, idx)
		    if (i >= NUM_FIXED_BLOCKS
			&& idx == bb_to_omp_idx[bb->index]
			&& i != (unsigned) bb->index)
		      {
			new_bb = BASIC_BLOCK_FOR_FN (cfun, i);
			break;
		      }
		  if (new_bb == NULL)
		    {
		      new_bb = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));
		      gcc_assert (new_bb != bb);
		    }
		}
	      /* Move the label after any existing labels of NEW_BB.  */
	      new_gsi = gsi_after_labels (new_bb);
	      gsi_remove (&i, false);
	      gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
	    }
	  else
	    {
	      /* Release SSA definitions.  */
	      release_defs (stmt);
	      gsi_remove (&i, true);
	    }

	  /* The iterator may have been invalidated by the removal;
	     restart from the (new) last statement in that case.  */
	  if (gsi_end_p (i))
	    i = gsi_last_bb (bb);
	  else
	    gsi_prev (&i);
	}
    }

  if ((unsigned) bb->index < bb_to_omp_idx.length ())
    bb_to_omp_idx[bb->index] = -1;
  remove_phi_nodes_and_edges_for_unreachable_block (bb);
  bb->il.gimple.seq = NULL;
  bb->il.gimple.phi_nodes = NULL;
}
2429 /* Given a basic block BB and a value VAL for use in the final statement
2430 of the block (if a GIMPLE_COND, GIMPLE_SWITCH, or computed goto), return
2431 the edge that will be taken out of the block.
2432 If VAL is NULL_TREE, then the current value of the final statement's
2433 predicate or index is used.
2434 If the value does not match a unique edge, NULL is returned. */
2436 edge
2437 find_taken_edge (basic_block bb, tree val)
2439 gimple *stmt;
2441 stmt = *gsi_last_bb (bb);
2443 /* Handle ENTRY and EXIT. */
2444 if (!stmt)
2447 else if (gimple_code (stmt) == GIMPLE_COND)
2448 return find_taken_edge_cond_expr (as_a <gcond *> (stmt), val);
2450 else if (gimple_code (stmt) == GIMPLE_SWITCH)
2451 return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), val);
2453 else if (computed_goto_p (stmt))
2455 /* Only optimize if the argument is a label, if the argument is
2456 not a label then we cannot construct a proper CFG.
2458 It may be the case that we only need to allow the LABEL_REF to
2459 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2460 appear inside a LABEL_EXPR just to be safe. */
2461 if (val
2462 && (TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2463 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2464 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2467 /* Otherwise we only know the taken successor edge if it's unique. */
2468 return single_succ_p (bb) ? single_succ_edge (bb) : NULL;
2471 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2472 statement, determine which of the outgoing edges will be taken out of the
2473 block. Return NULL if either edge may be taken. */
2475 static edge
2476 find_taken_edge_computed_goto (basic_block bb, tree val)
2478 basic_block dest;
2479 edge e = NULL;
2481 dest = label_to_block (cfun, val);
2482 if (dest)
2483 e = find_edge (bb, dest);
2485 /* It's possible for find_edge to return NULL here on invalid code
2486 that abuses the labels-as-values extension (e.g. code that attempts to
2487 jump *between* functions via stored labels-as-values; PR 84136).
2488 If so, then we simply return that NULL for the edge.
2489 We don't currently have a way of detecting such invalid code, so we
2490 can't assert that it was the case when a NULL edge occurs here. */
2492 return e;
2495 /* Given COND_STMT and a constant value VAL for use as the predicate,
2496 determine which of the two edges will be taken out of
2497 the statement's block. Return NULL if either edge may be taken.
2498 If VAL is NULL_TREE, then the current value of COND_STMT's predicate
2499 is used. */
2501 static edge
2502 find_taken_edge_cond_expr (const gcond *cond_stmt, tree val)
2504 edge true_edge, false_edge;
2506 if (val == NULL_TREE)
2508 /* Use the current value of the predicate. */
2509 if (gimple_cond_true_p (cond_stmt))
2510 val = integer_one_node;
2511 else if (gimple_cond_false_p (cond_stmt))
2512 val = integer_zero_node;
2513 else
2514 return NULL;
2516 else if (TREE_CODE (val) != INTEGER_CST)
2517 return NULL;
2519 extract_true_false_edges_from_block (gimple_bb (cond_stmt),
2520 &true_edge, &false_edge);
2522 return (integer_zerop (val) ? false_edge : true_edge);
2525 /* Given SWITCH_STMT and an INTEGER_CST VAL for use as the index, determine
2526 which edge will be taken out of the statement's block. Return NULL if any
2527 edge may be taken.
2528 If VAL is NULL_TREE, then the current value of SWITCH_STMT's index
2529 is used. */
2531 edge
2532 find_taken_edge_switch_expr (const gswitch *switch_stmt, tree val)
2534 basic_block dest_bb;
2535 edge e;
2536 tree taken_case;
2538 if (gimple_switch_num_labels (switch_stmt) == 1)
2539 taken_case = gimple_switch_default_label (switch_stmt);
2540 else
2542 if (val == NULL_TREE)
2543 val = gimple_switch_index (switch_stmt);
2544 if (TREE_CODE (val) != INTEGER_CST)
2545 return NULL;
2546 else
2547 taken_case = find_case_label_for_value (switch_stmt, val);
2549 dest_bb = label_to_block (cfun, CASE_LABEL (taken_case));
2551 e = find_edge (gimple_bb (switch_stmt), dest_bb);
2552 gcc_assert (e);
2553 return e;
/* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
   We can make optimal use here of the fact that the case labels are
   sorted: We can do a binary search for a case matching VAL.
   Falls back to the default label when no case matches.  */

tree
find_case_label_for_value (const gswitch *switch_stmt, tree val)
{
  size_t low, high, n = gimple_switch_num_labels (switch_stmt);
  tree default_case = gimple_switch_default_label (switch_stmt);

  /* Invariant: the matching label, if any, has index in (low, high).  */
  for (low = 0, high = n; high - low > 1; )
    {
      size_t i = (high + low) / 2;
      tree t = gimple_switch_label (switch_stmt, i);
      int cmp;

      /* Cache the result of comparing CASE_LOW and val.  */
      cmp = tree_int_cst_compare (CASE_LOW (t), val);

      if (cmp > 0)
	high = i;
      else
	low = i;

      if (CASE_HIGH (t) == NULL)
	{
	  /* A single-valued case label.  */
	  if (cmp == 0)
	    return t;
	}
      else
	{
	  /* A case range.  We can only handle integer ranges.  */
	  if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
	    return t;
	}
    }

  return default_case;
}
2599 /* Dump a basic block on stderr. */
2601 void
2602 gimple_debug_bb (basic_block bb)
2604 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2608 /* Dump basic block with index N on stderr. */
2610 basic_block
2611 gimple_debug_bb_n (int n)
2613 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2614 return BASIC_BLOCK_FOR_FN (cfun, n);
2618 /* Dump the CFG on stderr.
2620 FLAGS are the same used by the tree dumping functions
2621 (see TDF_* in dumpfile.h). */
2623 void
2624 gimple_debug_cfg (dump_flags_t flags)
2626 gimple_dump_cfg (stderr, flags);
2630 /* Dump the program showing basic block boundaries on the given FILE.
2632 FLAGS are the same used by the tree dumping functions (see TDF_* in
2633 tree.h). */
2635 void
2636 gimple_dump_cfg (FILE *file, dump_flags_t flags)
2638 if (flags & TDF_DETAILS)
2640 dump_function_header (file, current_function_decl, flags);
2641 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2642 n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2643 last_basic_block_for_fn (cfun));
2645 brief_dump_cfg (file, flags);
2646 fprintf (file, "\n");
2649 if (flags & TDF_STATS)
2650 dump_cfg_stats (file);
2652 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
/* Dump CFG statistics on FILE: memory consumed by blocks and edges of
   the current function, plus the global merged-label counter.  */

void
dump_cfg_stats (FILE *file)
{
  /* High-water mark across all functions dumped so far.  */
  static long max_num_merged_labels = 0;
  unsigned long size, total = 0;
  long num_edges;
  basic_block bb;
  const char * const fmt_str = "%-30s%-13s%12s\n";
  const char * const fmt_str_1 = "%-30s%13d" PRsa (11) "\n";
  const char * const fmt_str_2 = "%-30s%13ld" PRsa (11) "\n";
  const char * const fmt_str_3 = "%-43s" PRsa (11) "\n";
  const char *funcname = current_function_name ();

  fprintf (file, "\nCFG Statistics for %s\n\n", funcname);

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str, "", " Number of ", "Memory");
  fprintf (file, fmt_str, "", " instances ", "used ");
  fprintf (file, "---------------------------------------------------------\n");

  size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
  total += size;
  fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
	   SIZE_AMOUNT (size));

  /* Count edges once by walking each block's successor list.  */
  num_edges = 0;
  FOR_EACH_BB_FN (bb, cfun)
    num_edges += EDGE_COUNT (bb->succs);
  size = num_edges * sizeof (class edge_def);
  total += size;
  fprintf (file, fmt_str_2, "Edges", num_edges, SIZE_AMOUNT (size));

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str_3, "Total memory used by CFG data",
	   SIZE_AMOUNT (total));
  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, "\n");

  if (cfg_stats.num_merged_labels > max_num_merged_labels)
    max_num_merged_labels = cfg_stats.num_merged_labels;

  fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
	   cfg_stats.num_merged_labels, max_num_merged_labels);

  fprintf (file, "\n");
}
2706 /* Dump CFG statistics on stderr. Keep extern so that it's always
2707 linked in the final executable. */
2709 DEBUG_FUNCTION void
2710 debug_cfg_stats (void)
2712 dump_cfg_stats (stderr);
2715 /*---------------------------------------------------------------------------
2716 Miscellaneous helpers
2717 ---------------------------------------------------------------------------*/
2719 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2720 flow. Transfers of control flow associated with EH are excluded. */
2722 static bool
2723 call_can_make_abnormal_goto (gimple *t)
2725 /* If the function has no non-local labels, then a call cannot make an
2726 abnormal transfer of control. */
2727 if (!cfun->has_nonlocal_label
2728 && !cfun->calls_setjmp)
2729 return false;
2731 /* Likewise if the call has no side effects. */
2732 if (!gimple_has_side_effects (t))
2733 return false;
2735 /* Likewise if the called function is leaf. */
2736 if (gimple_call_flags (t) & ECF_LEAF)
2737 return false;
2739 return true;
2743 /* Return true if T can make an abnormal transfer of control flow.
2744 Transfers of control flow associated with EH are excluded. */
2746 bool
2747 stmt_can_make_abnormal_goto (gimple *t)
2749 if (computed_goto_p (t))
2750 return true;
2751 if (is_gimple_call (t))
2752 return call_can_make_abnormal_goto (t);
2753 return false;
2757 /* Return true if T represents a stmt that always transfers control. */
2759 bool
2760 is_ctrl_stmt (gimple *t)
2762 switch (gimple_code (t))
2764 case GIMPLE_COND:
2765 case GIMPLE_SWITCH:
2766 case GIMPLE_GOTO:
2767 case GIMPLE_RETURN:
2768 case GIMPLE_RESX:
2769 return true;
2770 default:
2771 return false;
/* Return true if T is a statement that may alter the flow of control
   (e.g., a call to a non-returning function).  */

bool
is_ctrl_altering_stmt (gimple *t)
{
  gcc_assert (t);

  switch (gimple_code (t))
    {
    case GIMPLE_CALL:
      /* Per stmt call flag indicates whether the call could alter
	 controlflow.  */
      if (gimple_call_ctrl_altering_p (t))
	return true;
      /* Otherwise fall through to the throw check below.  */
      break;

    case GIMPLE_EH_DISPATCH:
      /* EH_DISPATCH branches to the individual catch handlers at
	 this level of a try or allowed-exceptions region.  It can
	 fallthru to the next statement as well.  */
      return true;

    case GIMPLE_ASM:
      /* An asm goto (one with label operands) alters control flow.  */
      if (gimple_asm_nlabels (as_a <gasm *> (t)) > 0)
	return true;
      break;

    CASE_GIMPLE_OMP:
      /* OpenMP directives alter control flow.  */
      return true;

    case GIMPLE_TRANSACTION:
      /* A transaction start alters control flow.  */
      return true;

    default:
      break;
    }

  /* If a statement can throw, it alters control flow.  */
  return stmt_can_throw_internal (cfun, t);
}
2821 /* Return true if T is a simple local goto. */
2823 bool
2824 simple_goto_p (gimple *t)
2826 return (gimple_code (t) == GIMPLE_GOTO
2827 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
/* Return true if STMT should start a new basic block.  PREV_STMT is
   the statement preceding STMT.  It is used when STMT is a label or a
   case label.  Labels should only start a new basic block if their
   previous statement wasn't a label.  Otherwise, sequence of labels
   would generate unnecessary basic blocks that only contain a single
   label.  */

static inline bool
stmt_starts_bb_p (gimple *stmt, gimple *prev_stmt)
{
  if (stmt == NULL)
    return false;

  /* PREV_STMT is only set to a debug stmt if the debug stmt is before
     any nondebug stmts in the block.  We don't want to start another
     block in this case: the debug stmt will already have started the
     one STMT would start if we weren't outputting debug stmts.  */
  if (prev_stmt && is_gimple_debug (prev_stmt))
    return false;

  /* Labels start a new basic block only if the preceding statement
     wasn't a label of the same type.  This prevents the creation of
     consecutive blocks that have nothing but a single label.  */
  if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
    {
      /* Nonlocal and computed GOTO targets always start a new block.  */
      if (DECL_NONLOCAL (gimple_label_label (label_stmt))
	  || FORCED_LABEL (gimple_label_label (label_stmt)))
	return true;

      if (glabel *plabel = safe_dyn_cast <glabel *> (prev_stmt))
	{
	  /* A user (non-artificial) or nonlocal previous label keeps
	     STMT's label in a separate block.  */
	  if (DECL_NONLOCAL (gimple_label_label (plabel))
	      || !DECL_ARTIFICIAL (gimple_label_label (plabel)))
	    return true;

	  /* Both labels can share one block; count the coalescing.  */
	  cfg_stats.num_merged_labels++;
	  return false;
	}
      else
	return true;
    }
  else if (gimple_code (stmt) == GIMPLE_CALL)
    {
      if (gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
	/* setjmp acts similar to a nonlocal GOTO target and thus should
	   start a new block.  */
	return true;
      if (gimple_call_internal_p (stmt, IFN_PHI)
	  && prev_stmt
	  && gimple_code (prev_stmt) != GIMPLE_LABEL
	  && (gimple_code (prev_stmt) != GIMPLE_CALL
	      || ! gimple_call_internal_p (prev_stmt, IFN_PHI)))
	/* PHI nodes start a new block unless preceded by a label
	   or another PHI.  */
	return true;
    }

  return false;
}
2893 /* Return true if T should end a basic block. */
2895 bool
2896 stmt_ends_bb_p (gimple *t)
2898 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2901 /* Remove block annotations and other data structures. */
2903 void
2904 delete_tree_cfg_annotations (struct function *fn)
2906 vec_free (label_to_block_map_for_fn (fn));
2909 /* Return the virtual phi in BB. */
2911 gphi *
2912 get_virtual_phi (basic_block bb)
2914 for (gphi_iterator gsi = gsi_start_phis (bb);
2915 !gsi_end_p (gsi);
2916 gsi_next (&gsi))
2918 gphi *phi = gsi.phi ();
2920 if (virtual_operand_p (PHI_RESULT (phi)))
2921 return phi;
2924 return NULL;
2927 /* Return the first statement in basic block BB. */
2929 gimple *
2930 first_stmt (basic_block bb)
2932 gimple_stmt_iterator i = gsi_start_bb (bb);
2933 gimple *stmt = NULL;
2935 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2937 gsi_next (&i);
2938 stmt = NULL;
2940 return stmt;
2943 /* Return the first non-label statement in basic block BB. */
2945 static gimple *
2946 first_non_label_stmt (basic_block bb)
2948 gimple_stmt_iterator i = gsi_start_bb (bb);
2949 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2950 gsi_next (&i);
2951 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2954 /* Return the last statement in basic block BB. */
2956 gimple *
2957 last_nondebug_stmt (basic_block bb)
2959 gimple_stmt_iterator i = gsi_last_bb (bb);
2960 gimple *stmt = NULL;
2962 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2964 gsi_prev (&i);
2965 stmt = NULL;
2967 return stmt;
2970 /* Return the last statement of an otherwise empty block. Return NULL
2971 if the block is totally empty, or if it contains more than one
2972 statement. */
2974 gimple *
2975 last_and_only_stmt (basic_block bb)
2977 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2978 gimple *last, *prev;
2980 if (gsi_end_p (i))
2981 return NULL;
2983 last = gsi_stmt (i);
2984 gsi_prev_nondebug (&i);
2985 if (gsi_end_p (i))
2986 return last;
2988 /* Empty statements should no longer appear in the instruction stream.
2989 Everything that might have appeared before should be deleted by
2990 remove_useless_stmts, and the optimizers should just gsi_remove
2991 instead of smashing with build_empty_stmt.
2993 Thus the only thing that should appear here in a block containing
2994 one executable statement is a label. */
2995 prev = gsi_stmt (i);
2996 if (gimple_code (prev) == GIMPLE_LABEL)
2997 return last;
2998 else
2999 return NULL;
3002 /* Returns the basic block after which the new basic block created
3003 by splitting edge EDGE_IN should be placed. Tries to keep the new block
3004 near its "logical" location. This is of most help to humans looking
3005 at debugging dumps. */
3007 basic_block
3008 split_edge_bb_loc (edge edge_in)
3010 basic_block dest = edge_in->dest;
3011 basic_block dest_prev = dest->prev_bb;
3013 if (dest_prev)
3015 edge e = find_edge (dest_prev, dest);
3016 if (e && !(e->flags & EDGE_COMPLEX))
3017 return edge_in->src;
3019 return dest_prev;
/* Split a (typically critical) edge EDGE_IN.  Return the new block.
   Abort on abnormal edges.  */

static basic_block
gimple_split_edge (edge edge_in)
{
  basic_block new_bb, after_bb, dest;
  edge new_edge, e;

  /* Abnormal edges cannot be split.  */
  gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));

  dest = edge_in->dest;

  after_bb = split_edge_bb_loc (edge_in);

  new_bb = create_empty_bb (after_bb);
  /* The new block inherits the count of the edge being split.  */
  new_bb->count = edge_in->count ();

  /* We want to avoid re-allocating PHIs when we first
     add the fallthru edge from new_bb to dest but we also
     want to avoid changing PHI argument order when
     first redirecting edge_in away from dest.  The former
     avoids changing PHI argument order by adding them
     last and then the redirection swapping it back into
     place by means of unordered remove.
     So hack around things by temporarily removing all PHIs
     from the destination during the edge redirection and then
     making sure the edges stay in order.  */
  gimple_seq saved_phis = phi_nodes (dest);
  unsigned old_dest_idx = edge_in->dest_idx;
  set_phi_nodes (dest, NULL);
  new_edge = make_single_succ_edge (new_bb, dest, EDGE_FALLTHRU);
  e = redirect_edge_and_branch (edge_in, new_bb);
  /* The redirection must succeed and the fallthru edge must have taken
     over EDGE_IN's slot in DEST's predecessor vector.  */
  gcc_assert (e == edge_in && new_edge->dest_idx == old_dest_idx);
  /* set_phi_nodes sets the BB of the PHI nodes, so do it manually here.  */
  dest->il.gimple.phi_nodes = saved_phis;

  return new_bb;
}
3064 /* Verify properties of the address expression T whose base should be
3065 TREE_ADDRESSABLE if VERIFY_ADDRESSABLE is true. */
3067 static bool
3068 verify_address (tree t, bool verify_addressable)
3070 bool old_constant;
3071 bool old_side_effects;
3072 bool new_constant;
3073 bool new_side_effects;
3075 old_constant = TREE_CONSTANT (t);
3076 old_side_effects = TREE_SIDE_EFFECTS (t);
3078 recompute_tree_invariant_for_addr_expr (t);
3079 new_side_effects = TREE_SIDE_EFFECTS (t);
3080 new_constant = TREE_CONSTANT (t);
3082 if (old_constant != new_constant)
3084 error ("constant not recomputed when %<ADDR_EXPR%> changed");
3085 return true;
3087 if (old_side_effects != new_side_effects)
3089 error ("side effects not recomputed when %<ADDR_EXPR%> changed");
3090 return true;
3093 tree base = TREE_OPERAND (t, 0);
3094 while (handled_component_p (base))
3095 base = TREE_OPERAND (base, 0);
3097 if (!(VAR_P (base)
3098 || TREE_CODE (base) == PARM_DECL
3099 || TREE_CODE (base) == RESULT_DECL))
3100 return false;
3102 if (verify_addressable && !TREE_ADDRESSABLE (base))
3104 error ("address taken but %<TREE_ADDRESSABLE%> bit not set");
3105 return true;
3108 return false;
/* Verify if EXPR is a valid GIMPLE reference expression.  If
   REQUIRE_LVALUE is true verifies it is an lvalue.  Returns true
   if there is an error, otherwise false.  */

static bool
verify_types_in_gimple_reference (tree expr, bool require_lvalue)
{
  const char *code_name = get_tree_code_name (TREE_CODE (expr));

  /* These wrappers are only allowed at the outermost position.  */
  if (TREE_CODE (expr) == REALPART_EXPR
      || TREE_CODE (expr) == IMAGPART_EXPR
      || TREE_CODE (expr) == BIT_FIELD_REF
      || TREE_CODE (expr) == VIEW_CONVERT_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (TREE_CODE (expr) != VIEW_CONVERT_EXPR
	  && !is_gimple_reg_type (TREE_TYPE (expr)))
	{
	  error ("non-scalar %qs", code_name);
	  return true;
	}

      if (TREE_CODE (expr) == BIT_FIELD_REF)
	{
	  tree t1 = TREE_OPERAND (expr, 1);
	  tree t2 = TREE_OPERAND (expr, 2);
	  poly_uint64 size, bitpos;
	  /* Operand 1 is the size and operand 2 the bit position, both
	     must be bitsizetype-compatible poly ints.  */
	  if (!poly_int_tree_p (t1, &size)
	      || !poly_int_tree_p (t2, &bitpos)
	      || !types_compatible_p (bitsizetype, TREE_TYPE (t1))
	      || !types_compatible_p (bitsizetype, TREE_TYPE (t2)))
	    {
	      error ("invalid position or size operand to %qs", code_name);
	      return true;
	    }
	  if (INTEGRAL_TYPE_P (TREE_TYPE (expr))
	      && maybe_ne (TYPE_PRECISION (TREE_TYPE (expr)), size))
	    {
	      error ("integral result type precision does not match "
		     "field size of %qs", code_name);
	      return true;
	    }
	  else if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
		   && TYPE_MODE (TREE_TYPE (expr)) != BLKmode
		   && maybe_ne (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (expr))),
				size))
	    {
	      error ("mode size of non-integral result does not "
		     "match field size of %qs",
		     code_name);
	      return true;
	    }
	  if (INTEGRAL_TYPE_P (TREE_TYPE (op))
	      && !type_has_mode_precision_p (TREE_TYPE (op)))
	    {
	      error ("%qs of non-mode-precision operand", code_name);
	      return true;
	    }
	  if (!AGGREGATE_TYPE_P (TREE_TYPE (op))
	      && maybe_gt (size + bitpos,
			   tree_to_poly_uint64 (TYPE_SIZE (TREE_TYPE (op)))))
	    {
	      error ("position plus size exceeds size of referenced object in "
		     "%qs", code_name);
	      return true;
	    }
	}

      if ((TREE_CODE (expr) == REALPART_EXPR
	   || TREE_CODE (expr) == IMAGPART_EXPR)
	  && !useless_type_conversion_p (TREE_TYPE (expr),
					 TREE_TYPE (TREE_TYPE (op))))
	{
	  error ("type mismatch in %qs reference", code_name);
	  debug_generic_stmt (TREE_TYPE (expr));
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
	  return true;
	}

      if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
	{
	  /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
	     that their operand is not a register an invariant when
	     requiring an lvalue (this usually means there is a SRA or IPA-SRA
	     bug).  Otherwise there is nothing to verify, gross mismatches at
	     most invoke undefined behavior.  */
	  if (require_lvalue
	      && (is_gimple_reg (op) || is_gimple_min_invariant (op)))
	    {
	      error ("conversion of %qs on the left hand side of %qs",
		     get_tree_code_name (TREE_CODE (op)), code_name);
	      debug_generic_stmt (expr);
	      return true;
	    }
	  else if (is_gimple_reg (op)
		   && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
	    {
	      error ("conversion of register to a different size in %qs",
		     code_name);
	      debug_generic_stmt (expr);
	      return true;
	    }
	}

      /* Continue verifying the wrapped operand.  */
      expr = op;
    }

  /* Walk down the chain of handled components to the base.  */
  bool require_non_reg = false;
  while (handled_component_p (expr))
    {
      require_non_reg = true;
      code_name = get_tree_code_name (TREE_CODE (expr));

      /* The top-level wrappers above may not appear below another
	 component reference.  */
      if (TREE_CODE (expr) == REALPART_EXPR
	  || TREE_CODE (expr) == IMAGPART_EXPR
	  || TREE_CODE (expr) == BIT_FIELD_REF)
	{
	  error ("non-top-level %qs", code_name);
	  return true;
	}

      tree op = TREE_OPERAND (expr, 0);

      if (TREE_CODE (expr) == ARRAY_REF
	  || TREE_CODE (expr) == ARRAY_RANGE_REF)
	{
	  /* Index, lower bound and element size must be gimple values.  */
	  if (!is_gimple_val (TREE_OPERAND (expr, 1))
	      || (TREE_OPERAND (expr, 2)
		  && !is_gimple_val (TREE_OPERAND (expr, 2)))
	      || (TREE_OPERAND (expr, 3)
		  && !is_gimple_val (TREE_OPERAND (expr, 3))))
	    {
	      error ("invalid operands to %qs", code_name);
	      debug_generic_stmt (expr);
	      return true;
	    }
	}

      /* Verify if the reference array element types are compatible.  */
      if (TREE_CODE (expr) == ARRAY_REF
	  && !useless_type_conversion_p (TREE_TYPE (expr),
					 TREE_TYPE (TREE_TYPE (op))))
	{
	  error ("type mismatch in %qs", code_name);
	  debug_generic_stmt (TREE_TYPE (expr));
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
	  return true;
	}
      if (TREE_CODE (expr) == ARRAY_RANGE_REF
	  && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
					 TREE_TYPE (TREE_TYPE (op))))
	{
	  error ("type mismatch in %qs", code_name);
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
	  return true;
	}

      if (TREE_CODE (expr) == COMPONENT_REF)
	{
	  /* Operand 2, if present, is the field offset.  */
	  if (TREE_OPERAND (expr, 2)
	      && !is_gimple_val (TREE_OPERAND (expr, 2)))
	    {
	      error ("invalid %qs offset operator", code_name);
	      return true;
	    }
	  if (!useless_type_conversion_p (TREE_TYPE (expr),
					  TREE_TYPE (TREE_OPERAND (expr, 1))))
	    {
	      error ("type mismatch in %qs", code_name);
	      debug_generic_stmt (TREE_TYPE (expr));
	      debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
	      return true;
	    }
	}

      expr = op;
    }

  /* Now verify the base of the reference.  */
  code_name = get_tree_code_name (TREE_CODE (expr));

  if (TREE_CODE (expr) == MEM_REF)
    {
      if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0))
	  || (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
	      && verify_address (TREE_OPERAND (expr, 0), false)))
	{
	  error ("invalid address operand in %qs", code_name);
	  debug_generic_stmt (expr);
	  return true;
	}
      if (!poly_int_tree_p (TREE_OPERAND (expr, 1))
	  || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
	{
	  error ("invalid offset operand in %qs", code_name);
	  debug_generic_stmt (expr);
	  return true;
	}
      if (MR_DEPENDENCE_CLIQUE (expr) != 0
	  && MR_DEPENDENCE_CLIQUE (expr) > cfun->last_clique)
	{
	  error ("invalid clique in %qs", code_name);
	  debug_generic_stmt (expr);
	  return true;
	}
    }
  else if (TREE_CODE (expr) == TARGET_MEM_REF)
    {
      if (!TMR_BASE (expr)
	  || !is_gimple_mem_ref_addr (TMR_BASE (expr))
	  || (TREE_CODE (TMR_BASE (expr)) == ADDR_EXPR
	      && verify_address (TMR_BASE (expr), false)))
	{
	  error ("invalid address operand in %qs", code_name);
	  return true;
	}
      if (!TMR_OFFSET (expr)
	  || !poly_int_tree_p (TMR_OFFSET (expr))
	  || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
	{
	  error ("invalid offset operand in %qs", code_name);
	  debug_generic_stmt (expr);
	  return true;
	}
      if (MR_DEPENDENCE_CLIQUE (expr) != 0
	  && MR_DEPENDENCE_CLIQUE (expr) > cfun->last_clique)
	{
	  error ("invalid clique in %qs", code_name);
	  debug_generic_stmt (expr);
	  return true;
	}
    }
  else if (INDIRECT_REF_P (expr))
    {
      error ("%qs in gimple IL", code_name);
      debug_generic_stmt (expr);
      return true;
    }
  else if (require_non_reg
	   && (is_gimple_reg (expr)
	       || (is_gimple_min_invariant (expr)
		   /* STRING_CSTs are representatives of the string table
		      entry which lives in memory.  */
		   && TREE_CODE (expr) != STRING_CST)))
    {
      error ("%qs as base where non-register is required", code_name);
      debug_generic_stmt (expr);
      return true;
    }

  if (!require_lvalue
      && (is_gimple_reg (expr) || is_gimple_min_invariant (expr)))
    return false;

  if (TREE_CODE (expr) != SSA_NAME && is_gimple_id (expr))
    return false;

  if (TREE_CODE (expr) != TARGET_MEM_REF
      && TREE_CODE (expr) != MEM_REF)
    {
      error ("invalid expression for min lvalue");
      return true;
    }

  return false;
}
3379 /* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
3380 list of pointer-to types that is trivially convertible to DEST. */
3382 static bool
3383 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3385 tree src;
3387 if (!TYPE_POINTER_TO (src_obj))
3388 return true;
3390 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3391 if (useless_type_conversion_p (dest, src))
3392 return true;
3394 return false;
3397 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3398 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3400 static bool
3401 valid_fixed_convert_types_p (tree type1, tree type2)
3403 return (FIXED_POINT_TYPE_P (type1)
3404 && (INTEGRAL_TYPE_P (type2)
3405 || SCALAR_FLOAT_TYPE_P (type2)
3406 || FIXED_POINT_TYPE_P (type2)));
3409 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3410 is a problem, otherwise false. */
3412 static bool
3413 verify_gimple_call (gcall *stmt)
3415 tree fn = gimple_call_fn (stmt);
3416 tree fntype, fndecl;
3417 unsigned i;
/* Internal calls have no fn tree; non-internal calls must have one.  */
3419 if (gimple_call_internal_p (stmt))
3421 if (fn)
3423 error ("gimple call has two targets");
3424 debug_generic_stmt (fn);
3425 return true;
3428 else
3430 if (!fn)
3432 error ("gimple call has no target");
3433 return true;
/* Whatever the callee is, it must be valid as a gimple call address.  */
3437 if (fn && !is_gimple_call_addr (fn))
3439 error ("invalid function in gimple call");
3440 debug_generic_stmt (fn);
3441 return true;
3444 if (fn
3445 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3446 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3447 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3449 error ("non-function in gimple call");
3450 return true;
3453 fndecl = gimple_call_fndecl (stmt);
/* A looping-const-or-pure function must also be marked const or pure.  */
3454 if (fndecl
3455 && TREE_CODE (fndecl) == FUNCTION_DECL
3456 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3457 && !DECL_PURE_P (fndecl)
3458 && !TREE_READONLY (fndecl))
3460 error ("invalid pure const state for function");
3461 return true;
/* The LHS, if present, must be a register or a verifiable lvalue.  */
3464 tree lhs = gimple_call_lhs (stmt);
3465 if (lhs
3466 && (!is_gimple_reg (lhs)
3467 && (!is_gimple_lvalue (lhs)
3468 || verify_types_in_gimple_reference
3469 (TREE_CODE (lhs) == WITH_SIZE_EXPR
3470 ? TREE_OPERAND (lhs, 0) : lhs, true))))
3472 error ("invalid LHS in gimple call");
3473 return true;
/* A noreturn, control-altering call must not keep an LHS that
   should_remove_lhs_p says ought to have been dropped.  */
3476 if (gimple_call_ctrl_altering_p (stmt)
3477 && gimple_call_noreturn_p (stmt)
3478 && should_remove_lhs_p (lhs))
3480 error ("LHS in %<noreturn%> call");
3481 return true;
3484 fntype = gimple_call_fntype (stmt);
3485 if (fntype
3486 && lhs
3487 && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (fntype))
3488 /* ??? At least C++ misses conversions at assignments from
3489 void * call results.
3490 For now simply allow arbitrary pointer type conversions. */
3491 && !(POINTER_TYPE_P (TREE_TYPE (lhs))
3492 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3494 error ("invalid conversion in gimple call");
3495 debug_generic_stmt (TREE_TYPE (lhs));
3496 debug_generic_stmt (TREE_TYPE (fntype));
3497 return true;
3500 if (gimple_call_chain (stmt)
3501 && !is_gimple_val (gimple_call_chain (stmt)))
3503 error ("invalid static chain in gimple call");
3504 debug_generic_stmt (gimple_call_chain (stmt));
3505 return true;
3508 /* If there is a static chain argument, the call should either be
3509 indirect, or the decl should have DECL_STATIC_CHAIN set. */
3510 if (gimple_call_chain (stmt)
3511 && fndecl
3512 && !DECL_STATIC_CHAIN (fndecl))
3514 error ("static chain with function that doesn%'t use one");
3515 return true;
3518 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
3520 switch (DECL_FUNCTION_CODE (fndecl))
3522 case BUILT_IN_UNREACHABLE:
3523 case BUILT_IN_UNREACHABLE_TRAP:
3524 case BUILT_IN_TRAP:
3525 if (gimple_call_num_args (stmt) > 0)
3527 /* Built-in unreachable with parameters might not be caught by
3528 undefined behavior sanitizer. Front-ends do check users do not
3529 call them that way but we also produce calls to
3530 __builtin_unreachable internally, for example when IPA figures
3531 out a call cannot happen in a legal program. In such cases,
3532 we must make sure arguments are stripped off. */
3533 error ("%<__builtin_unreachable%> or %<__builtin_trap%> call "
3534 "with arguments");
3535 return true;
3537 break;
3538 default:
3539 break;
3543 /* For a call to .DEFERRED_INIT,
3544 LHS = DEFERRED_INIT (SIZE of the DECL, INIT_TYPE, NAME of the DECL)
3545 we should guarantee that when the 1st argument is a constant, it should
3546 be the same as the size of the LHS. */
3548 if (gimple_call_internal_p (stmt, IFN_DEFERRED_INIT))
3550 tree size_of_arg0 = gimple_call_arg (stmt, 0);
3551 tree size_of_lhs = TYPE_SIZE_UNIT (TREE_TYPE (lhs));
3553 if (TREE_CODE (lhs) == SSA_NAME)
3554 lhs = SSA_NAME_VAR (lhs);
3556 poly_uint64 size_from_arg0, size_from_lhs;
3557 bool is_constant_size_arg0 = poly_int_tree_p (size_of_arg0,
3558 &size_from_arg0);
3559 bool is_constant_size_lhs = poly_int_tree_p (size_of_lhs,
3560 &size_from_lhs);
/* Only compare when both sizes are compile-time (poly-int) constants.  */
3561 if (is_constant_size_arg0 && is_constant_size_lhs)
3562 if (maybe_ne (size_from_arg0, size_from_lhs))
3564 error ("%<DEFERRED_INIT%> calls should have same "
3565 "constant size for the first argument and LHS");
3566 return true;
3570 /* ??? The C frontend passes unpromoted arguments in case it
3571 didn't see a function declaration before the call. So for now
3572 leave the call arguments mostly unverified. Once we gimplify
3573 unit-at-a-time we have a chance to fix this. */
3574 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3576 tree arg = gimple_call_arg (stmt, i);
3577 if ((is_gimple_reg_type (TREE_TYPE (arg))
3578 && !is_gimple_val (arg))
3579 || (!is_gimple_reg_type (TREE_TYPE (arg))
3580 && !is_gimple_lvalue (arg)))
3582 error ("invalid argument to gimple call");
3583 debug_generic_expr (arg);
3584 return true;
3586 if (!is_gimple_reg (arg))
3588 if (TREE_CODE (arg) == WITH_SIZE_EXPR)
3589 arg = TREE_OPERAND (arg, 0);
3590 if (verify_types_in_gimple_reference (arg, false))
3591 return true;
3595 return false;
3598 /* Verifies the gimple comparison with the result type TYPE and
3599 the operands OP0 and OP1, comparison code is CODE. */
/* Returns true when a problem is found, false otherwise.  */
3601 static bool
3602 verify_gimple_comparison (tree type, tree op0, tree op1, enum tree_code code)
3604 tree op0_type = TREE_TYPE (op0);
3605 tree op1_type = TREE_TYPE (op1);
3607 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3609 error ("invalid operands in gimple comparison");
3610 return true;
3613 /* For comparisons we do not have the operations type as the
3614 effective type the comparison is carried out in. Instead
3615 we require that either the first operand is trivially
3616 convertible into the second, or the other way around. */
3617 if (!useless_type_conversion_p (op0_type, op1_type)
3618 && !useless_type_conversion_p (op1_type, op0_type))
3620 error ("mismatching comparison operand types");
3621 debug_generic_expr (op0_type);
3622 debug_generic_expr (op1_type);
3623 return true;
3626 /* The resulting type of a comparison may be an effective boolean type. */
3627 if (INTEGRAL_TYPE_P (type)
3628 && (TREE_CODE (type) == BOOLEAN_TYPE
3629 || TYPE_PRECISION (type) == 1))
/* A scalar boolean result over vector operands is only allowed for
   (in)equality, or when op0 is a boolean/integer vector.  */
3631 if ((VECTOR_TYPE_P (op0_type)
3632 || VECTOR_TYPE_P (op1_type))
3633 && code != EQ_EXPR && code != NE_EXPR
3634 && !VECTOR_BOOLEAN_TYPE_P (op0_type)
3635 && !VECTOR_INTEGER_TYPE_P (op0_type))
3637 error ("unsupported operation or type for vector comparison"
3638 " returning a boolean");
3639 debug_generic_expr (op0_type);
3640 debug_generic_expr (op1_type);
3641 return true;
3644 /* Or a boolean vector type with the same element count
3645 as the comparison operand types. */
3646 else if (VECTOR_TYPE_P (type)
3647 && TREE_CODE (TREE_TYPE (type)) == BOOLEAN_TYPE)
3649 if (TREE_CODE (op0_type) != VECTOR_TYPE
3650 || TREE_CODE (op1_type) != VECTOR_TYPE)
3652 error ("non-vector operands in vector comparison");
3653 debug_generic_expr (op0_type);
3654 debug_generic_expr (op1_type);
3655 return true;
/* Result and operand vectors must have the same element count.  */
3658 if (maybe_ne (TYPE_VECTOR_SUBPARTS (type),
3659 TYPE_VECTOR_SUBPARTS (op0_type)))
3661 error ("invalid vector comparison resulting type");
3662 debug_generic_expr (type);
3663 return true;
3666 else
3668 error ("bogus comparison result type");
3669 debug_generic_expr (type);
3670 return true;
3673 return false;
3676 /* Verify a gimple assignment statement STMT with an unary rhs.
3677 Returns true if anything is wrong. */
3679 static bool
3680 verify_gimple_assign_unary (gassign *stmt)
3682 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3683 tree lhs = gimple_assign_lhs (stmt);
3684 tree lhs_type = TREE_TYPE (lhs);
3685 tree rhs1 = gimple_assign_rhs1 (stmt);
3686 tree rhs1_type = TREE_TYPE (rhs1);
/* A unary operation always produces a register and consumes a value.  */
3688 if (!is_gimple_reg (lhs))
3690 error ("non-register as LHS of unary operation");
3691 return true;
3694 if (!is_gimple_val (rhs1))
3696 error ("invalid operand in unary operation");
3697 return true;
3700 const char* const code_name = get_tree_code_name (rhs_code);
3702 /* First handle conversions. */
3703 switch (rhs_code)
3705 CASE_CONVERT:
3707 /* Allow conversions between vectors with the same number of elements,
3708 provided that the conversion is OK for the element types too. */
3709 if (VECTOR_TYPE_P (lhs_type)
3710 && VECTOR_TYPE_P (rhs1_type)
3711 && known_eq (TYPE_VECTOR_SUBPARTS (lhs_type),
3712 TYPE_VECTOR_SUBPARTS (rhs1_type)))
3714 lhs_type = TREE_TYPE (lhs_type);
3715 rhs1_type = TREE_TYPE (rhs1_type);
3717 else if (VECTOR_TYPE_P (lhs_type) || VECTOR_TYPE_P (rhs1_type))
3719 error ("invalid vector types in nop conversion");
3720 debug_generic_expr (lhs_type);
3721 debug_generic_expr (rhs1_type);
3722 return true;
3725 /* Allow conversions from pointer type to integral type only if
3726 there is no sign or zero extension involved.
3727 For targets were the precision of ptrofftype doesn't match that
3728 of pointers we allow conversions to types where
3729 POINTERS_EXTEND_UNSIGNED specifies how that works. */
3730 if ((POINTER_TYPE_P (lhs_type)
3731 && INTEGRAL_TYPE_P (rhs1_type))
3732 || (POINTER_TYPE_P (rhs1_type)
3733 && INTEGRAL_TYPE_P (lhs_type)
3734 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3735 #if defined(POINTERS_EXTEND_UNSIGNED)
3736 || (TYPE_MODE (rhs1_type) == ptr_mode
3737 && (TYPE_PRECISION (lhs_type)
3738 == BITS_PER_WORD /* word_mode */
3739 || (TYPE_PRECISION (lhs_type)
3740 == GET_MODE_PRECISION (Pmode))))
3741 #endif
3743 return false;
3745 /* Allow conversion from integral to offset type and vice versa. */
3746 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3747 && INTEGRAL_TYPE_P (rhs1_type))
3748 || (INTEGRAL_TYPE_P (lhs_type)
3749 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3750 return false;
3752 /* Otherwise assert we are converting between types of the
3753 same kind. */
3754 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3756 error ("invalid types in nop conversion");
3757 debug_generic_expr (lhs_type);
3758 debug_generic_expr (rhs1_type);
3759 return true;
3762 return false;
3765 case ADDR_SPACE_CONVERT_EXPR:
/* Both sides must be pointers and the address spaces must differ.  */
3767 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3768 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3769 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3771 error ("invalid types in address space conversion");
3772 debug_generic_expr (lhs_type);
3773 debug_generic_expr (rhs1_type);
3774 return true;
3777 return false;
3780 case FIXED_CONVERT_EXPR:
3782 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3783 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3785 error ("invalid types in fixed-point conversion");
3786 debug_generic_expr (lhs_type);
3787 debug_generic_expr (rhs1_type);
3788 return true;
3791 return false;
3794 case FLOAT_EXPR:
3796 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3797 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3798 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3800 error ("invalid types in conversion to floating-point");
3801 debug_generic_expr (lhs_type);
3802 debug_generic_expr (rhs1_type);
3803 return true;
3806 return false;
3809 case FIX_TRUNC_EXPR:
3811 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3812 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3813 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3815 error ("invalid types in conversion to integer");
3816 debug_generic_expr (lhs_type);
3817 debug_generic_expr (rhs1_type);
3818 return true;
3821 return false;
3824 case VEC_UNPACK_HI_EXPR:
3825 case VEC_UNPACK_LO_EXPR:
3826 case VEC_UNPACK_FLOAT_HI_EXPR:
3827 case VEC_UNPACK_FLOAT_LO_EXPR:
3828 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
3829 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
3830 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3831 || TREE_CODE (lhs_type) != VECTOR_TYPE
3832 || (!INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3833 && !SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type)))
3834 || (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3835 && !SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
3836 || ((rhs_code == VEC_UNPACK_HI_EXPR
3837 || rhs_code == VEC_UNPACK_LO_EXPR)
3838 && (INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3839 != INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3840 || ((rhs_code == VEC_UNPACK_FLOAT_HI_EXPR
3841 || rhs_code == VEC_UNPACK_FLOAT_LO_EXPR)
3842 && (INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3843 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))))
3844 || ((rhs_code == VEC_UNPACK_FIX_TRUNC_HI_EXPR
3845 || rhs_code == VEC_UNPACK_FIX_TRUNC_LO_EXPR)
3846 && (INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3847 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type))))
3848 || (maybe_ne (GET_MODE_SIZE (element_mode (lhs_type)),
3849 2 * GET_MODE_SIZE (element_mode (rhs1_type)))
3850 && (!VECTOR_BOOLEAN_TYPE_P (lhs_type)
3851 || !VECTOR_BOOLEAN_TYPE_P (rhs1_type)))
3852 || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (lhs_type),
3853 TYPE_VECTOR_SUBPARTS (rhs1_type)))
3855 error ("type mismatch in %qs expression", code_name);
3856 debug_generic_expr (lhs_type);
3857 debug_generic_expr (rhs1_type);
3858 return true;
3861 return false;
3863 case NEGATE_EXPR:
3864 case ABS_EXPR:
3865 case BIT_NOT_EXPR:
3866 case PAREN_EXPR:
3867 case CONJ_EXPR:
3868 /* Disallow pointer and offset types for many of the unary gimple. */
3869 if (POINTER_TYPE_P (lhs_type)
3870 || TREE_CODE (lhs_type) == OFFSET_TYPE)
3872 error ("invalid types for %qs", code_name);
3873 debug_generic_expr (lhs_type);
3874 debug_generic_expr (rhs1_type);
3875 return true;
3877 break;
3879 case ABSU_EXPR:
/* ABSU takes a signed integral input and yields the unsigned type of
   the same precision.  */
3880 if (!ANY_INTEGRAL_TYPE_P (lhs_type)
3881 || !TYPE_UNSIGNED (lhs_type)
3882 || !ANY_INTEGRAL_TYPE_P (rhs1_type)
3883 || TYPE_UNSIGNED (rhs1_type)
3884 || element_precision (lhs_type) != element_precision (rhs1_type))
3886 error ("invalid types for %qs", code_name);
3887 debug_generic_expr (lhs_type);
3888 debug_generic_expr (rhs1_type);
3889 return true;
3891 return false;
3893 case VEC_DUPLICATE_EXPR:
3894 if (TREE_CODE (lhs_type) != VECTOR_TYPE
3895 || !useless_type_conversion_p (TREE_TYPE (lhs_type), rhs1_type))
3897 error ("%qs should be from a scalar to a like vector", code_name);
3898 debug_generic_expr (lhs_type);
3899 debug_generic_expr (rhs1_type);
3900 return true;
3902 return false;
/* No other unary codes are valid in a gimple assignment.  */
3904 default:
3905 gcc_unreachable ();
3908 /* For the remaining codes assert there is no conversion involved. */
3909 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3911 error ("non-trivial conversion in unary operation");
3912 debug_generic_expr (lhs_type);
3913 debug_generic_expr (rhs1_type);
3914 return true;
3917 return false;
3920 /* Verify a gimple assignment statement STMT with a binary rhs.
3921 Returns true if anything is wrong. */
3923 static bool
3924 verify_gimple_assign_binary (gassign *stmt)
3926 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3927 tree lhs = gimple_assign_lhs (stmt);
3928 tree lhs_type = TREE_TYPE (lhs);
3929 tree rhs1 = gimple_assign_rhs1 (stmt);
3930 tree rhs1_type = TREE_TYPE (rhs1);
3931 tree rhs2 = gimple_assign_rhs2 (stmt);
3932 tree rhs2_type = TREE_TYPE (rhs2);
/* Binary operations produce a register value from two gimple values.  */
3934 if (!is_gimple_reg (lhs))
3936 error ("non-register as LHS of binary operation");
3937 return true;
3940 if (!is_gimple_val (rhs1)
3941 || !is_gimple_val (rhs2))
3943 error ("invalid operands in binary operation");
3944 return true;
3947 const char* const code_name = get_tree_code_name (rhs_code);
3949 /* First handle operations that involve different types. */
3950 switch (rhs_code)
3952 case COMPLEX_EXPR:
3954 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3955 || !(INTEGRAL_TYPE_P (rhs1_type)
3956 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3957 || !(INTEGRAL_TYPE_P (rhs2_type)
3958 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3960 error ("type mismatch in %qs", code_name);
3961 debug_generic_expr (lhs_type);
3962 debug_generic_expr (rhs1_type);
3963 debug_generic_expr (rhs2_type);
3964 return true;
3967 return false;
3970 case LSHIFT_EXPR:
3971 case RSHIFT_EXPR:
3972 case LROTATE_EXPR:
3973 case RROTATE_EXPR:
3975 /* Shifts and rotates are ok on integral types, fixed point
3976 types and integer vector types. */
3977 if ((!INTEGRAL_TYPE_P (rhs1_type)
3978 && !FIXED_POINT_TYPE_P (rhs1_type)
3979 && ! (VECTOR_TYPE_P (rhs1_type)
3980 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3981 || (!INTEGRAL_TYPE_P (rhs2_type)
3982 /* Vector shifts of vectors are also ok. */
3983 && ! (VECTOR_TYPE_P (rhs1_type)
3984 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3985 && VECTOR_TYPE_P (rhs2_type)
3986 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3987 || !useless_type_conversion_p (lhs_type, rhs1_type))
3989 error ("type mismatch in %qs", code_name);
3990 debug_generic_expr (lhs_type);
3991 debug_generic_expr (rhs1_type);
3992 debug_generic_expr (rhs2_type);
3993 return true;
3996 return false;
3999 case WIDEN_LSHIFT_EXPR:
/* The result must be at least twice as wide as the shifted operand.  */
4001 if (!INTEGRAL_TYPE_P (lhs_type)
4002 || !INTEGRAL_TYPE_P (rhs1_type)
4003 || TREE_CODE (rhs2) != INTEGER_CST
4004 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
4006 error ("type mismatch in %qs", code_name);
4007 debug_generic_expr (lhs_type);
4008 debug_generic_expr (rhs1_type);
4009 debug_generic_expr (rhs2_type);
4010 return true;
4013 return false;
4016 case VEC_WIDEN_LSHIFT_HI_EXPR:
4017 case VEC_WIDEN_LSHIFT_LO_EXPR:
4019 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4020 || TREE_CODE (lhs_type) != VECTOR_TYPE
4021 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
4022 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
4023 || TREE_CODE (rhs2) != INTEGER_CST
4024 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
4025 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
4027 error ("type mismatch in %qs", code_name);
4028 debug_generic_expr (lhs_type);
4029 debug_generic_expr (rhs1_type);
4030 debug_generic_expr (rhs2_type);
4031 return true;
4034 return false;
4037 case PLUS_EXPR:
4038 case MINUS_EXPR:
4040 tree lhs_etype = lhs_type;
4041 tree rhs1_etype = rhs1_type;
4042 tree rhs2_etype = rhs2_type;
/* For vector plus/minus, check the element types instead.  */
4043 if (VECTOR_TYPE_P (lhs_type))
4045 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4046 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
4048 error ("invalid non-vector operands to %qs", code_name);
4049 return true;
4051 lhs_etype = TREE_TYPE (lhs_type);
4052 rhs1_etype = TREE_TYPE (rhs1_type);
4053 rhs2_etype = TREE_TYPE (rhs2_type);
/* Pointer arithmetic must use POINTER_PLUS_EXPR / POINTER_DIFF_EXPR.  */
4055 if (POINTER_TYPE_P (lhs_etype)
4056 || POINTER_TYPE_P (rhs1_etype)
4057 || POINTER_TYPE_P (rhs2_etype))
4059 error ("invalid (pointer) operands %qs", code_name);
4060 return true;
4063 /* Continue with generic binary expression handling. */
4064 break;
4067 case POINTER_PLUS_EXPR:
4069 if (!POINTER_TYPE_P (rhs1_type)
4070 || !useless_type_conversion_p (lhs_type, rhs1_type)
4071 || !ptrofftype_p (rhs2_type))
4073 error ("type mismatch in %qs", code_name);
4074 debug_generic_stmt (lhs_type);
4075 debug_generic_stmt (rhs1_type);
4076 debug_generic_stmt (rhs2_type);
4077 return true;
4080 return false;
4083 case POINTER_DIFF_EXPR:
4085 if (!POINTER_TYPE_P (rhs1_type)
4086 || !POINTER_TYPE_P (rhs2_type)
4087 /* Because we special-case pointers to void we allow difference
4088 of arbitrary pointers with the same mode. */
4089 || TYPE_MODE (rhs1_type) != TYPE_MODE (rhs2_type)
4090 || !INTEGRAL_TYPE_P (lhs_type)
4091 || TYPE_UNSIGNED (lhs_type)
4092 || TYPE_PRECISION (lhs_type) != TYPE_PRECISION (rhs1_type))
4094 error ("type mismatch in %qs", code_name);
4095 debug_generic_stmt (lhs_type);
4096 debug_generic_stmt (rhs1_type);
4097 debug_generic_stmt (rhs2_type);
4098 return true;
4101 return false;
/* TRUTH_* codes are not valid in a gimple binary assignment.  */
4104 case TRUTH_ANDIF_EXPR:
4105 case TRUTH_ORIF_EXPR:
4106 case TRUTH_AND_EXPR:
4107 case TRUTH_OR_EXPR:
4108 case TRUTH_XOR_EXPR:
4110 gcc_unreachable ();
4112 case LT_EXPR:
4113 case LE_EXPR:
4114 case GT_EXPR:
4115 case GE_EXPR:
4116 case EQ_EXPR:
4117 case NE_EXPR:
4118 case UNORDERED_EXPR:
4119 case ORDERED_EXPR:
4120 case UNLT_EXPR:
4121 case UNLE_EXPR:
4122 case UNGT_EXPR:
4123 case UNGE_EXPR:
4124 case UNEQ_EXPR:
4125 case LTGT_EXPR:
4126 /* Comparisons are also binary, but the result type is not
4127 connected to the operand types. */
4128 return verify_gimple_comparison (lhs_type, rhs1, rhs2, rhs_code);
4130 case WIDEN_MULT_EXPR:
4131 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
4132 return true;
4133 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
4134 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
4136 case WIDEN_SUM_EXPR:
4138 if (((TREE_CODE (rhs1_type) != VECTOR_TYPE
4139 || TREE_CODE (lhs_type) != VECTOR_TYPE)
4140 && ((!INTEGRAL_TYPE_P (rhs1_type)
4141 && !SCALAR_FLOAT_TYPE_P (rhs1_type))
4142 || (!INTEGRAL_TYPE_P (lhs_type)
4143 && !SCALAR_FLOAT_TYPE_P (lhs_type))))
4144 || !useless_type_conversion_p (lhs_type, rhs2_type)
4145 || maybe_lt (GET_MODE_SIZE (element_mode (rhs2_type)),
4146 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4148 error ("type mismatch in %qs", code_name);
4149 debug_generic_expr (lhs_type);
4150 debug_generic_expr (rhs1_type);
4151 debug_generic_expr (rhs2_type);
4152 return true;
4154 return false;
4157 case VEC_WIDEN_MULT_HI_EXPR:
4158 case VEC_WIDEN_MULT_LO_EXPR:
4159 case VEC_WIDEN_MULT_EVEN_EXPR:
4160 case VEC_WIDEN_MULT_ODD_EXPR:
4162 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4163 || TREE_CODE (lhs_type) != VECTOR_TYPE
4164 || !types_compatible_p (rhs1_type, rhs2_type)
4165 || maybe_ne (GET_MODE_SIZE (element_mode (lhs_type)),
4166 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4168 error ("type mismatch in %qs", code_name);
4169 debug_generic_expr (lhs_type);
4170 debug_generic_expr (rhs1_type);
4171 debug_generic_expr (rhs2_type);
4172 return true;
4174 return false;
4177 case VEC_PACK_TRUNC_EXPR:
4178 /* ??? We currently use VEC_PACK_TRUNC_EXPR to simply concat
4179 vector boolean types. */
4180 if (VECTOR_BOOLEAN_TYPE_P (lhs_type)
4181 && VECTOR_BOOLEAN_TYPE_P (rhs1_type)
4182 && types_compatible_p (rhs1_type, rhs2_type)
4183 && known_eq (TYPE_VECTOR_SUBPARTS (lhs_type),
4184 2 * TYPE_VECTOR_SUBPARTS (rhs1_type)))
4185 return false;
4187 /* Fallthru. */
4188 case VEC_PACK_SAT_EXPR:
4189 case VEC_PACK_FIX_TRUNC_EXPR:
4191 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4192 || TREE_CODE (lhs_type) != VECTOR_TYPE
4193 || !((rhs_code == VEC_PACK_FIX_TRUNC_EXPR
4194 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))
4195 && INTEGRAL_TYPE_P (TREE_TYPE (lhs_type)))
4196 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
4197 == INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))))
4198 || !types_compatible_p (rhs1_type, rhs2_type)
4199 || maybe_ne (GET_MODE_SIZE (element_mode (rhs1_type)),
4200 2 * GET_MODE_SIZE (element_mode (lhs_type)))
4201 || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (rhs1_type),
4202 TYPE_VECTOR_SUBPARTS (lhs_type)))
4204 error ("type mismatch in %qs", code_name);
4205 debug_generic_expr (lhs_type);
4206 debug_generic_expr (rhs1_type);
4207 debug_generic_expr (rhs2_type);
4208 return true;
4211 return false;
4214 case VEC_PACK_FLOAT_EXPR:
4215 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4216 || TREE_CODE (lhs_type) != VECTOR_TYPE
4217 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
4218 || !SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type))
4219 || !types_compatible_p (rhs1_type, rhs2_type)
4220 || maybe_ne (GET_MODE_SIZE (element_mode (rhs1_type)),
4221 2 * GET_MODE_SIZE (element_mode (lhs_type)))
4222 || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (rhs1_type),
4223 TYPE_VECTOR_SUBPARTS (lhs_type)))
4225 error ("type mismatch in %qs", code_name);
4226 debug_generic_expr (lhs_type);
4227 debug_generic_expr (rhs1_type);
4228 debug_generic_expr (rhs2_type);
4229 return true;
4232 return false;
4234 case MULT_EXPR:
4235 case MULT_HIGHPART_EXPR:
4236 case TRUNC_DIV_EXPR:
4237 case CEIL_DIV_EXPR:
4238 case FLOOR_DIV_EXPR:
4239 case ROUND_DIV_EXPR:
4240 case TRUNC_MOD_EXPR:
4241 case CEIL_MOD_EXPR:
4242 case FLOOR_MOD_EXPR:
4243 case ROUND_MOD_EXPR:
4244 case RDIV_EXPR:
4245 case EXACT_DIV_EXPR:
4246 case BIT_IOR_EXPR:
4247 case BIT_XOR_EXPR:
4248 /* Disallow pointer and offset types for many of the binary gimple. */
4249 if (POINTER_TYPE_P (lhs_type)
4250 || TREE_CODE (lhs_type) == OFFSET_TYPE)
4252 error ("invalid types for %qs", code_name);
4253 debug_generic_expr (lhs_type);
4254 debug_generic_expr (rhs1_type);
4255 debug_generic_expr (rhs2_type);
4256 return true;
4258 /* Continue with generic binary expression handling. */
4259 break;
4261 case MIN_EXPR:
4262 case MAX_EXPR:
4263 /* Continue with generic binary expression handling. */
4264 break;
4266 case BIT_AND_EXPR:
/* Pointer & constant (e.g. alignment masking) is allowed.  */
4267 if (POINTER_TYPE_P (lhs_type)
4268 && TREE_CODE (rhs2) == INTEGER_CST)
4269 break;
4270 /* Disallow pointer and offset types for many of the binary gimple. */
4271 if (POINTER_TYPE_P (lhs_type)
4272 || TREE_CODE (lhs_type) == OFFSET_TYPE)
4274 error ("invalid types for %qs", code_name);
4275 debug_generic_expr (lhs_type);
4276 debug_generic_expr (rhs1_type);
4277 debug_generic_expr (rhs2_type);
4278 return true;
4280 /* Continue with generic binary expression handling. */
4281 break;
4283 case VEC_SERIES_EXPR:
4284 if (!useless_type_conversion_p (rhs1_type, rhs2_type))
4286 error ("type mismatch in %qs", code_name);
4287 debug_generic_expr (rhs1_type);
4288 debug_generic_expr (rhs2_type);
4289 return true;
4291 if (TREE_CODE (lhs_type) != VECTOR_TYPE
4292 || !useless_type_conversion_p (TREE_TYPE (lhs_type), rhs1_type))
4294 error ("vector type expected in %qs", code_name);
4295 debug_generic_expr (lhs_type);
4296 return true;
4298 return false;
4300 default:
4301 gcc_unreachable ();
/* Generic handling for codes that "break" above: both operands must
   convert trivially to the result type.  */
4304 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4305 || !useless_type_conversion_p (lhs_type, rhs2_type))
4307 error ("type mismatch in binary expression");
4308 debug_generic_stmt (lhs_type);
4309 debug_generic_stmt (rhs1_type);
4310 debug_generic_stmt (rhs2_type);
4311 return true;
4314 return false;
4317 /* Verify a gimple assignment statement STMT with a ternary rhs.
4318 Returns true if anything is wrong. */
4320 static bool
4321 verify_gimple_assign_ternary (gassign *stmt)
4323 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4324 tree lhs = gimple_assign_lhs (stmt);
4325 tree lhs_type = TREE_TYPE (lhs);
4326 tree rhs1 = gimple_assign_rhs1 (stmt);
4327 tree rhs1_type = TREE_TYPE (rhs1);
4328 tree rhs2 = gimple_assign_rhs2 (stmt);
4329 tree rhs2_type = TREE_TYPE (rhs2);
4330 tree rhs3 = gimple_assign_rhs3 (stmt);
4331 tree rhs3_type = TREE_TYPE (rhs3);
4333 if (!is_gimple_reg (lhs))
4335 error ("non-register as LHS of ternary operation");
4336 return true;
4339 if (!is_gimple_val (rhs1)
4340 || !is_gimple_val (rhs2)
4341 || !is_gimple_val (rhs3))
4343 error ("invalid operands in ternary operation");
4344 return true;
4347 const char* const code_name = get_tree_code_name (rhs_code);
4349 /* First handle operations that involve different types. */
4350 switch (rhs_code)
4352 case WIDEN_MULT_PLUS_EXPR:
4353 case WIDEN_MULT_MINUS_EXPR:
4354 if ((!INTEGRAL_TYPE_P (rhs1_type)
4355 && !FIXED_POINT_TYPE_P (rhs1_type))
4356 || !useless_type_conversion_p (rhs1_type, rhs2_type)
4357 || !useless_type_conversion_p (lhs_type, rhs3_type)
4358 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
4359 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
4361 error ("type mismatch in %qs", code_name);
4362 debug_generic_expr (lhs_type);
4363 debug_generic_expr (rhs1_type);
4364 debug_generic_expr (rhs2_type);
4365 debug_generic_expr (rhs3_type);
4366 return true;
4368 break;
4370 case VEC_COND_EXPR:
4371 if (!VECTOR_BOOLEAN_TYPE_P (rhs1_type)
4372 || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs1_type),
4373 TYPE_VECTOR_SUBPARTS (lhs_type)))
4375 error ("the first argument of a %qs must be of a "
4376 "boolean vector type of the same number of elements "
4377 "as the result", code_name);
4378 debug_generic_expr (lhs_type);
4379 debug_generic_expr (rhs1_type);
4380 return true;
4382 /* Fallthrough. */
4383 case COND_EXPR:
4384 if (!useless_type_conversion_p (lhs_type, rhs2_type)
4385 || !useless_type_conversion_p (lhs_type, rhs3_type))
4387 error ("type mismatch in %qs", code_name);
4388 debug_generic_expr (lhs_type);
4389 debug_generic_expr (rhs2_type);
4390 debug_generic_expr (rhs3_type);
4391 return true;
4393 break;
4395 case VEC_PERM_EXPR:
4396 /* If permute is constant, then we allow for lhs and rhs
4397 to have different vector types, provided:
4398 (1) lhs, rhs1, rhs2 have same element type.
4399 (2) rhs3 vector is constant and has integer element type.
4400 (3) len(lhs) == len(rhs3) && len(rhs1) == len(rhs2). */
4402 if (TREE_CODE (lhs_type) != VECTOR_TYPE
4403 || TREE_CODE (rhs1_type) != VECTOR_TYPE
4404 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4405 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4407 error ("vector types expected in %qs", code_name);
4408 debug_generic_expr (lhs_type);
4409 debug_generic_expr (rhs1_type);
4410 debug_generic_expr (rhs2_type);
4411 debug_generic_expr (rhs3_type);
4412 return true;
4415 /* If rhs3 is constant, we allow lhs, rhs1 and rhs2 to be different vector types,
4416 as long as lhs, rhs1 and rhs2 have same element type. */
4417 if (TREE_CONSTANT (rhs3)
4418 ? (!useless_type_conversion_p (TREE_TYPE (lhs_type), TREE_TYPE (rhs1_type))
4419 || !useless_type_conversion_p (TREE_TYPE (lhs_type), TREE_TYPE (rhs2_type)))
4420 : (!useless_type_conversion_p (lhs_type, rhs1_type)
4421 || !useless_type_conversion_p (lhs_type, rhs2_type)))
4423 error ("type mismatch in %qs", code_name);
4424 debug_generic_expr (lhs_type);
4425 debug_generic_expr (rhs1_type);
4426 debug_generic_expr (rhs2_type);
4427 debug_generic_expr (rhs3_type);
4428 return true;
4431 /* If rhs3 is constant, relax the check len(rhs2) == len(rhs3). */
4432 if (maybe_ne (TYPE_VECTOR_SUBPARTS (rhs1_type),
4433 TYPE_VECTOR_SUBPARTS (rhs2_type))
4434 || (!TREE_CONSTANT(rhs3)
4435 && maybe_ne (TYPE_VECTOR_SUBPARTS (rhs2_type),
4436 TYPE_VECTOR_SUBPARTS (rhs3_type)))
4437 || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs3_type),
4438 TYPE_VECTOR_SUBPARTS (lhs_type)))
4440 error ("vectors with different element number found in %qs",
4441 code_name);
4442 debug_generic_expr (lhs_type);
4443 debug_generic_expr (rhs1_type);
4444 debug_generic_expr (rhs2_type);
4445 debug_generic_expr (rhs3_type);
4446 return true;
4449 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
4450 || (TREE_CODE (rhs3) != VECTOR_CST
4451 && (GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE
4452 (TREE_TYPE (rhs3_type)))
4453 != GET_MODE_BITSIZE (SCALAR_TYPE_MODE
4454 (TREE_TYPE (rhs1_type))))))
4456 error ("invalid mask type in %qs", code_name);
4457 debug_generic_expr (lhs_type);
4458 debug_generic_expr (rhs1_type);
4459 debug_generic_expr (rhs2_type);
4460 debug_generic_expr (rhs3_type);
4461 return true;
4464 return false;
4466 case SAD_EXPR:
4467 if (!useless_type_conversion_p (rhs1_type, rhs2_type)
4468 || !useless_type_conversion_p (lhs_type, rhs3_type)
4469 || 2 * GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type)))
4470 > GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (lhs_type))))
4472 error ("type mismatch in %qs", code_name);
4473 debug_generic_expr (lhs_type);
4474 debug_generic_expr (rhs1_type);
4475 debug_generic_expr (rhs2_type);
4476 debug_generic_expr (rhs3_type);
4477 return true;
4480 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4481 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4482 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4484 error ("vector types expected in %qs", code_name);
4485 debug_generic_expr (lhs_type);
4486 debug_generic_expr (rhs1_type);
4487 debug_generic_expr (rhs2_type);
4488 debug_generic_expr (rhs3_type);
4489 return true;
4492 return false;
4494 case BIT_INSERT_EXPR:
4495 if (! useless_type_conversion_p (lhs_type, rhs1_type))
4497 error ("type mismatch in %qs", code_name);
4498 debug_generic_expr (lhs_type);
4499 debug_generic_expr (rhs1_type);
4500 return true;
4502 if (! ((INTEGRAL_TYPE_P (rhs1_type)
4503 && INTEGRAL_TYPE_P (rhs2_type))
4504 /* Vector element insert. */
4505 || (VECTOR_TYPE_P (rhs1_type)
4506 && types_compatible_p (TREE_TYPE (rhs1_type), rhs2_type))
4507 /* Aligned sub-vector insert. */
4508 || (VECTOR_TYPE_P (rhs1_type)
4509 && VECTOR_TYPE_P (rhs2_type)
4510 && types_compatible_p (TREE_TYPE (rhs1_type),
4511 TREE_TYPE (rhs2_type))
4512 && multiple_p (TYPE_VECTOR_SUBPARTS (rhs1_type),
4513 TYPE_VECTOR_SUBPARTS (rhs2_type))
4514 && multiple_p (wi::to_poly_offset (rhs3),
4515 wi::to_poly_offset (TYPE_SIZE (rhs2_type))))))
4517 error ("not allowed type combination in %qs", code_name);
4518 debug_generic_expr (rhs1_type);
4519 debug_generic_expr (rhs2_type);
4520 return true;
4522 if (! tree_fits_uhwi_p (rhs3)
4523 || ! types_compatible_p (bitsizetype, TREE_TYPE (rhs3))
4524 || ! tree_fits_uhwi_p (TYPE_SIZE (rhs2_type)))
4526 error ("invalid position or size in %qs", code_name);
4527 return true;
4529 if (INTEGRAL_TYPE_P (rhs1_type)
4530 && !type_has_mode_precision_p (rhs1_type))
4532 error ("%qs into non-mode-precision operand", code_name);
4533 return true;
4535 if (INTEGRAL_TYPE_P (rhs1_type))
4537 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4538 if (bitpos >= TYPE_PRECISION (rhs1_type)
4539 || (bitpos + TYPE_PRECISION (rhs2_type)
4540 > TYPE_PRECISION (rhs1_type)))
4542 error ("insertion out of range in %qs", code_name);
4543 return true;
4546 else if (VECTOR_TYPE_P (rhs1_type))
4548 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4549 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (TYPE_SIZE (rhs2_type));
4550 if (bitpos % bitsize != 0)
4552 error ("%qs not at element boundary", code_name);
4553 return true;
4556 return false;
4558 case DOT_PROD_EXPR:
4560 if (((TREE_CODE (rhs1_type) != VECTOR_TYPE
4561 || TREE_CODE (lhs_type) != VECTOR_TYPE)
4562 && ((!INTEGRAL_TYPE_P (rhs1_type)
4563 && !SCALAR_FLOAT_TYPE_P (rhs1_type))
4564 || (!INTEGRAL_TYPE_P (lhs_type)
4565 && !SCALAR_FLOAT_TYPE_P (lhs_type))))
4566 /* rhs1_type and rhs2_type may differ in sign. */
4567 || !tree_nop_conversion_p (rhs1_type, rhs2_type)
4568 || !useless_type_conversion_p (lhs_type, rhs3_type)
4569 || maybe_lt (GET_MODE_SIZE (element_mode (rhs3_type)),
4570 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4572 error ("type mismatch in %qs", code_name);
4573 debug_generic_expr (lhs_type);
4574 debug_generic_expr (rhs1_type);
4575 debug_generic_expr (rhs2_type);
4576 return true;
4578 return false;
4581 case REALIGN_LOAD_EXPR:
4582 /* FIXME. */
4583 return false;
4585 default:
4586 gcc_unreachable ();
4588 return false;
4591 /* Verify a gimple assignment statement STMT with a single rhs.
4592 Returns true if anything is wrong. */
4594 static bool
4595 verify_gimple_assign_single (gassign *stmt)
4597 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4598 tree lhs = gimple_assign_lhs (stmt);
4599 tree lhs_type = TREE_TYPE (lhs);
4600 tree rhs1 = gimple_assign_rhs1 (stmt);
4601 tree rhs1_type = TREE_TYPE (rhs1);
4602 bool res = false;
4604 const char* const code_name = get_tree_code_name (rhs_code);
/* A single-rhs assignment is a plain copy: the LHS and RHS types must
   agree exactly, as GIMPLE has no implicit conversions. */
4606 if (!useless_type_conversion_p (lhs_type, rhs1_type))
4608 error ("non-trivial conversion in %qs", code_name);
4609 debug_generic_expr (lhs_type);
4610 debug_generic_expr (rhs1_type);
4611 return true;
/* A clobber may only target a declaration or a MEM_REF. */
4614 if (gimple_clobber_p (stmt)
4615 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
4617 error ("%qs LHS in clobber statement",
4618 get_tree_code_name (TREE_CODE (lhs)));
4619 debug_generic_expr (lhs);
4620 return true;
/* WITH_SIZE_EXPR may appear on the RHS (load) but never as an LHS. */
4623 if (TREE_CODE (lhs) == WITH_SIZE_EXPR)
4625 error ("%qs LHS in assignment statement",
4626 get_tree_code_name (TREE_CODE (lhs)));
4627 debug_generic_expr (lhs);
4628 return true;
/* When the LHS is a memory reference, validate its reference tree;
   accumulate the verdict in RES so RHS checks still run. */
4631 if (handled_component_p (lhs)
4632 || TREE_CODE (lhs) == MEM_REF
4633 || TREE_CODE (lhs) == TARGET_MEM_REF)
4634 res |= verify_types_in_gimple_reference (lhs, true);
4636 /* Special codes we cannot handle via their class. */
4637 switch (rhs_code)
4639 case ADDR_EXPR:
4641 tree op = TREE_OPERAND (rhs1, 0);
4642 if (!is_gimple_addressable (op))
4644 error ("invalid operand in %qs", code_name);
4645 return true;
4648 /* Technically there is no longer a need for matching types, but
4649 gimple hygiene asks for this check. In LTO we can end up
4650 combining incompatible units and thus end up with addresses
4651 of globals that change their type to a common one. */
4652 if (!in_lto_p
4653 && !types_compatible_p (TREE_TYPE (op),
4654 TREE_TYPE (TREE_TYPE (rhs1)))
4655 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
4656 TREE_TYPE (op)))
4658 error ("type mismatch in %qs", code_name);
4659 debug_generic_stmt (TREE_TYPE (rhs1));
4660 debug_generic_stmt (TREE_TYPE (op));
4661 return true;
4664 return (verify_address (rhs1, true)
4665 || verify_types_in_gimple_reference (op, true));
4668 /* tcc_reference */
4669 case INDIRECT_REF:
/* INDIRECT_REF was replaced by MEM_REF and must not survive into GIMPLE. */
4670 error ("%qs in gimple IL", code_name);
4671 return true;
4673 case WITH_SIZE_EXPR:
4674 if (!is_gimple_val (TREE_OPERAND (rhs1, 1)))
4676 error ("invalid %qs size argument in load", code_name);
4677 debug_generic_stmt (lhs);
4678 debug_generic_stmt (rhs1);
4679 return true;
/* Strip the wrapper and check the underlying reference below. */
4681 rhs1 = TREE_OPERAND (rhs1, 0);
4682 /* Fallthru. */
4683 case COMPONENT_REF:
4684 case BIT_FIELD_REF:
4685 case ARRAY_REF:
4686 case ARRAY_RANGE_REF:
4687 case VIEW_CONVERT_EXPR:
4688 case REALPART_EXPR:
4689 case IMAGPART_EXPR:
4690 case TARGET_MEM_REF:
4691 case MEM_REF:
/* A register-typed store must have a register LHS; a memory load into
   a register is fine. */
4692 if (!is_gimple_reg (lhs)
4693 && is_gimple_reg_type (TREE_TYPE (lhs)))
4695 error ("invalid RHS for gimple memory store: %qs", code_name);
4696 debug_generic_stmt (lhs);
4697 debug_generic_stmt (rhs1);
4698 return true;
4700 return res || verify_types_in_gimple_reference (rhs1, false);
4702 /* tcc_constant */
4703 case SSA_NAME:
4704 case INTEGER_CST:
4705 case REAL_CST:
4706 case FIXED_CST:
4707 case COMPLEX_CST:
4708 case VECTOR_CST:
4709 case STRING_CST:
4710 return res;
4712 /* tcc_declaration */
4713 case CONST_DECL:
4714 return res;
4715 case VAR_DECL:
4716 case PARM_DECL:
/* Memory-to-memory copies of register types are not valid GIMPLE. */
4717 if (!is_gimple_reg (lhs)
4718 && !is_gimple_reg (rhs1)
4719 && is_gimple_reg_type (TREE_TYPE (lhs)))
4721 error ("invalid RHS for gimple memory store: %qs", code_name);
4722 debug_generic_stmt (lhs);
4723 debug_generic_stmt (rhs1);
4724 return true;
4726 return res;
4728 case CONSTRUCTOR:
4729 if (VECTOR_TYPE_P (rhs1_type))
4731 unsigned int i;
4732 tree elt_i, elt_v, elt_t = NULL_TREE;
4734 if (CONSTRUCTOR_NELTS (rhs1) == 0)
4735 return res;
4736 /* For vector CONSTRUCTORs we require that either it is empty
4737 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4738 (then the element count must be correct to cover the whole
4739 outer vector and index must be NULL on all elements, or it is
4740 a CONSTRUCTOR of scalar elements, where we as an exception allow
4741 smaller number of elements (assuming zero filling) and
4742 consecutive indexes as compared to NULL indexes (such
4743 CONSTRUCTORs can appear in the IL from FEs). */
4744 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
/* The first element fixes ELT_T; all others must match it. */
4746 if (elt_t == NULL_TREE)
4748 elt_t = TREE_TYPE (elt_v);
4749 if (VECTOR_TYPE_P (elt_t))
4751 tree elt_t = TREE_TYPE (elt_v);
4752 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4753 TREE_TYPE (elt_t)))
4755 error ("incorrect type of vector %qs elements",
4756 code_name);
4757 debug_generic_stmt (rhs1);
4758 return true;
4760 else if (maybe_ne (CONSTRUCTOR_NELTS (rhs1)
4761 * TYPE_VECTOR_SUBPARTS (elt_t),
4762 TYPE_VECTOR_SUBPARTS (rhs1_type)))
4764 error ("incorrect number of vector %qs elements",
4765 code_name);
4766 debug_generic_stmt (rhs1);
4767 return true;
4770 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4771 elt_t))
4773 error ("incorrect type of vector %qs elements",
4774 code_name);
4775 debug_generic_stmt (rhs1);
4776 return true;
4778 else if (maybe_gt (CONSTRUCTOR_NELTS (rhs1),
4779 TYPE_VECTOR_SUBPARTS (rhs1_type)))
4781 error ("incorrect number of vector %qs elements",
4782 code_name);
4783 debug_generic_stmt (rhs1);
4784 return true;
4787 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4789 error ("incorrect type of vector CONSTRUCTOR elements");
4790 debug_generic_stmt (rhs1);
4791 return true;
/* Scalar elements may use consecutive integer indexes; sub-vector
   elements must have NULL indexes. */
4793 if (elt_i != NULL_TREE
4794 && (VECTOR_TYPE_P (elt_t)
4795 || TREE_CODE (elt_i) != INTEGER_CST
4796 || compare_tree_int (elt_i, i) != 0))
4798 error ("vector %qs with non-NULL element index",
4799 code_name);
4800 debug_generic_stmt (rhs1);
4801 return true;
4803 if (!is_gimple_val (elt_v))
4805 error ("vector %qs element is not a GIMPLE value",
4806 code_name);
4807 debug_generic_stmt (rhs1);
4808 return true;
/* Non-vector CONSTRUCTORs in GIMPLE must be empty (zero-initialization). */
4812 else if (CONSTRUCTOR_NELTS (rhs1) != 0)
4814 error ("non-vector %qs with elements", code_name);
4815 debug_generic_stmt (rhs1);
4816 return true;
4818 return res;
4820 case OBJ_TYPE_REF:
4821 /* FIXME. */
4822 return res;
4824 default:;
4827 return res;
4830 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4831 is a problem, otherwise false. */
4833 static bool
4834 verify_gimple_assign (gassign *stmt)
4836 switch (gimple_assign_rhs_class (stmt))
4838 case GIMPLE_SINGLE_RHS:
4839 return verify_gimple_assign_single (stmt);
4841 case GIMPLE_UNARY_RHS:
4842 return verify_gimple_assign_unary (stmt);
4844 case GIMPLE_BINARY_RHS:
4845 return verify_gimple_assign_binary (stmt);
4847 case GIMPLE_TERNARY_RHS:
4848 return verify_gimple_assign_ternary (stmt);
4850 default:
4851 gcc_unreachable ();
4855 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4856 is a problem, otherwise false. */
4858 static bool
4859 verify_gimple_return (greturn *stmt)
4861 tree op = gimple_return_retval (stmt);
4862 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4864 /* We cannot test for present return values as we do not fix up missing
4865 return values from the original source. */
4866 if (op == NULL)
4867 return false;
4869 if (!is_gimple_val (op)
4870 && TREE_CODE (op) != RESULT_DECL)
4872 error ("invalid operand in return statement");
4873 debug_generic_stmt (op);
4874 return true;
4877 if ((TREE_CODE (op) == RESULT_DECL
4878 && DECL_BY_REFERENCE (op))
4879 || (TREE_CODE (op) == SSA_NAME
4880 && SSA_NAME_VAR (op)
4881 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4882 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4883 op = TREE_TYPE (op);
4885 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4887 error ("invalid conversion in return statement");
4888 debug_generic_stmt (restype);
4889 debug_generic_stmt (TREE_TYPE (op));
4890 return true;
4893 return false;
4897 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4898 is a problem, otherwise false. */
4900 static bool
4901 verify_gimple_goto (ggoto *stmt)
4903 tree dest = gimple_goto_dest (stmt);
4905 /* ??? We have two canonical forms of direct goto destinations, a
4906 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4907 if (TREE_CODE (dest) != LABEL_DECL
4908 && (!is_gimple_val (dest)
4909 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4911 error ("goto destination is neither a label nor a pointer");
4912 return true;
4915 return false;
4918 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4919 is a problem, otherwise false. */
4921 static bool
4922 verify_gimple_switch (gswitch *stmt)
4924 unsigned int i, n;
4925 tree elt, prev_upper_bound = NULL_TREE;
4926 tree index_type, elt_type = NULL_TREE;
4928 if (!is_gimple_val (gimple_switch_index (stmt)))
4930 error ("invalid operand to switch statement");
4931 debug_generic_stmt (gimple_switch_index (stmt));
4932 return true;
4935 index_type = TREE_TYPE (gimple_switch_index (stmt));
4936 if (! INTEGRAL_TYPE_P (index_type))
4938 error ("non-integral type switch statement");
4939 debug_generic_expr (index_type);
4940 return true;
4943 elt = gimple_switch_label (stmt, 0);
4944 if (CASE_LOW (elt) != NULL_TREE
4945 || CASE_HIGH (elt) != NULL_TREE
4946 || CASE_CHAIN (elt) != NULL_TREE)
4948 error ("invalid default case label in switch statement");
4949 debug_generic_expr (elt);
4950 return true;
4953 n = gimple_switch_num_labels (stmt);
4954 for (i = 1; i < n; i++)
4956 elt = gimple_switch_label (stmt, i);
4958 if (CASE_CHAIN (elt))
4960 error ("invalid %<CASE_CHAIN%>");
4961 debug_generic_expr (elt);
4962 return true;
4964 if (! CASE_LOW (elt))
4966 error ("invalid case label in switch statement");
4967 debug_generic_expr (elt);
4968 return true;
4970 if (CASE_HIGH (elt)
4971 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4973 error ("invalid case range in switch statement");
4974 debug_generic_expr (elt);
4975 return true;
4978 if (! elt_type)
4980 elt_type = TREE_TYPE (CASE_LOW (elt));
4981 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4983 error ("type precision mismatch in switch statement");
4984 return true;
4987 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4988 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4990 error ("type mismatch for case label in switch statement");
4991 debug_generic_expr (elt);
4992 return true;
4995 if (prev_upper_bound)
4997 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4999 error ("case labels not sorted in switch statement");
5000 return true;
5004 prev_upper_bound = CASE_HIGH (elt);
5005 if (! prev_upper_bound)
5006 prev_upper_bound = CASE_LOW (elt);
5009 return false;
5012 /* Verify a gimple debug statement STMT.
5013 Returns true if anything is wrong. */
5015 static bool
5016 verify_gimple_debug (gimple *stmt ATTRIBUTE_UNUSED)
5018 /* There isn't much that could be wrong in a gimple debug stmt. A
5019 gimple debug bind stmt, for example, maps a tree, that's usually
5020 a VAR_DECL or a PARM_DECL, but that could also be some scalarized
5021 component or member of an aggregate type, to another tree, that
5022 can be an arbitrary expression. These stmts expand into debug
5023 insns, and are converted to debug notes by var-tracking.cc. */
5024 return false;
5027 /* Verify a gimple label statement STMT.
5028 Returns true if anything is wrong. */
5030 static bool
5031 verify_gimple_label (glabel *stmt)
5033 tree decl = gimple_label_label (stmt);
5034 int uid;
5035 bool err = false;
5037 if (TREE_CODE (decl) != LABEL_DECL)
5038 return true;
5039 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
5040 && DECL_CONTEXT (decl) != current_function_decl)
5042 error ("label context is not the current function declaration");
5043 err |= true;
5046 uid = LABEL_DECL_UID (decl);
5047 if (cfun->cfg
5048 && (uid == -1
5049 || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
5051 error ("incorrect entry in %<label_to_block_map%>");
5052 err |= true;
5055 uid = EH_LANDING_PAD_NR (decl);
5056 if (uid)
5058 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
5059 if (decl != lp->post_landing_pad)
5061 error ("incorrect setting of landing pad number");
5062 err |= true;
5066 return err;
5069 /* Verify a gimple cond statement STMT.
5070 Returns true if anything is wrong. */
5072 static bool
5073 verify_gimple_cond (gcond *stmt)
5075 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
5077 error ("invalid comparison code in gimple cond");
5078 return true;
5080 if (!(!gimple_cond_true_label (stmt)
5081 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
5082 || !(!gimple_cond_false_label (stmt)
5083 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
5085 error ("invalid labels in gimple cond");
5086 return true;
5089 return verify_gimple_comparison (boolean_type_node,
5090 gimple_cond_lhs (stmt),
5091 gimple_cond_rhs (stmt),
5092 gimple_cond_code (stmt));
5095 /* Verify the GIMPLE statement STMT. Returns true if there is an
5096 error, otherwise false. */
5098 static bool
5099 verify_gimple_stmt (gimple *stmt)
5101 switch (gimple_code (stmt))
5103 case GIMPLE_ASSIGN:
5104 return verify_gimple_assign (as_a <gassign *> (stmt));
5106 case GIMPLE_LABEL:
5107 return verify_gimple_label (as_a <glabel *> (stmt));
5109 case GIMPLE_CALL:
5110 return verify_gimple_call (as_a <gcall *> (stmt));
5112 case GIMPLE_COND:
5113 return verify_gimple_cond (as_a <gcond *> (stmt));
5115 case GIMPLE_GOTO:
5116 return verify_gimple_goto (as_a <ggoto *> (stmt));
5118 case GIMPLE_SWITCH:
5119 return verify_gimple_switch (as_a <gswitch *> (stmt));
5121 case GIMPLE_RETURN:
5122 return verify_gimple_return (as_a <greturn *> (stmt));
5124 case GIMPLE_ASM:
5125 return false;
5127 case GIMPLE_TRANSACTION:
5128 return verify_gimple_transaction (as_a <gtransaction *> (stmt));
5130 /* Tuples that do not have tree operands. */
5131 case GIMPLE_NOP:
5132 case GIMPLE_PREDICT:
5133 case GIMPLE_RESX:
5134 case GIMPLE_EH_DISPATCH:
5135 case GIMPLE_EH_MUST_NOT_THROW:
5136 return false;
5138 CASE_GIMPLE_OMP:
5139 /* OpenMP directives are validated by the FE and never operated
5140 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
5141 non-gimple expressions when the main index variable has had
5142 its address taken. This does not affect the loop itself
5143 because the header of an GIMPLE_OMP_FOR is merely used to determine
5144 how to setup the parallel iteration. */
5145 return false;
5147 case GIMPLE_ASSUME:
5148 return false;
5150 case GIMPLE_DEBUG:
5151 return verify_gimple_debug (stmt);
5153 default:
5154 gcc_unreachable ();
5158 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
5159 and false otherwise. */
5161 static bool
5162 verify_gimple_phi (gphi *phi)
5164 bool err = false;
5165 unsigned i;
5166 tree phi_result = gimple_phi_result (phi);
5167 bool virtual_p;
5169 if (!phi_result)
5171 error ("invalid %<PHI%> result");
5172 return true;
5175 virtual_p = virtual_operand_p (phi_result);
5176 if (TREE_CODE (phi_result) != SSA_NAME
5177 || (virtual_p
5178 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
5180 error ("invalid %<PHI%> result");
5181 err = true;
5184 for (i = 0; i < gimple_phi_num_args (phi); i++)
5186 tree t = gimple_phi_arg_def (phi, i);
5188 if (!t)
5190 error ("missing %<PHI%> def");
5191 err |= true;
5192 continue;
5194 /* Addressable variables do have SSA_NAMEs but they
5195 are not considered gimple values. */
5196 else if ((TREE_CODE (t) == SSA_NAME
5197 && virtual_p != virtual_operand_p (t))
5198 || (virtual_p
5199 && (TREE_CODE (t) != SSA_NAME
5200 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
5201 || (!virtual_p
5202 && !is_gimple_val (t)))
5204 error ("invalid %<PHI%> argument");
5205 debug_generic_expr (t);
5206 err |= true;
5208 #ifdef ENABLE_TYPES_CHECKING
5209 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
5211 error ("incompatible types in %<PHI%> argument %u", i);
5212 debug_generic_stmt (TREE_TYPE (phi_result));
5213 debug_generic_stmt (TREE_TYPE (t));
5214 err |= true;
5216 #endif
5219 return err;
5222 /* Verify the GIMPLE statements inside the sequence STMTS. */
5224 static bool
5225 verify_gimple_in_seq_2 (gimple_seq stmts)
5227 gimple_stmt_iterator ittr;
5228 bool err = false;
5230 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
5232 gimple *stmt = gsi_stmt (ittr);
5234 switch (gimple_code (stmt))
5236 case GIMPLE_BIND:
5237 err |= verify_gimple_in_seq_2 (
5238 gimple_bind_body (as_a <gbind *> (stmt)));
5239 break;
5241 case GIMPLE_TRY:
5242 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
5243 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
5244 break;
5246 case GIMPLE_EH_FILTER:
5247 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
5248 break;
5250 case GIMPLE_EH_ELSE:
5252 geh_else *eh_else = as_a <geh_else *> (stmt);
5253 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else));
5254 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else));
5256 break;
5258 case GIMPLE_CATCH:
5259 err |= verify_gimple_in_seq_2 (gimple_catch_handler (
5260 as_a <gcatch *> (stmt)));
5261 break;
5263 case GIMPLE_ASSUME:
5264 err |= verify_gimple_in_seq_2 (gimple_assume_body (stmt));
5265 break;
5267 case GIMPLE_TRANSACTION:
5268 err |= verify_gimple_transaction (as_a <gtransaction *> (stmt));
5269 break;
5271 default:
5273 bool err2 = verify_gimple_stmt (stmt);
5274 if (err2)
5275 debug_gimple_stmt (stmt);
5276 err |= err2;
5281 return err;
5284 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
5285 is a problem, otherwise false. */
5287 static bool
5288 verify_gimple_transaction (gtransaction *stmt)
5290 tree lab;
5292 lab = gimple_transaction_label_norm (stmt);
5293 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5294 return true;
5295 lab = gimple_transaction_label_uninst (stmt);
5296 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5297 return true;
5298 lab = gimple_transaction_label_over (stmt);
5299 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5300 return true;
5302 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
5306 /* Verify the GIMPLE statements inside the statement list STMTS. */
5308 DEBUG_FUNCTION bool
5309 verify_gimple_in_seq (gimple_seq stmts, bool ice)
5311 timevar_push (TV_TREE_STMT_VERIFY);
5312 bool res = verify_gimple_in_seq_2 (stmts);
5313 if (res && ice)
5314 internal_error ("%<verify_gimple%> failed");
5315 timevar_pop (TV_TREE_STMT_VERIFY);
5316 return res;
5319 /* Return true when the T can be shared. */
5321 static bool
5322 tree_node_can_be_shared (tree t)
5324 if (IS_TYPE_OR_DECL_P (t)
5325 || TREE_CODE (t) == SSA_NAME
5326 || TREE_CODE (t) == IDENTIFIER_NODE
5327 || TREE_CODE (t) == CASE_LABEL_EXPR
5328 || is_gimple_min_invariant (t))
5329 return true;
5331 if (t == error_mark_node)
5332 return true;
5334 return false;
5337 /* Called via walk_tree. Verify tree sharing. */
5339 static tree
5340 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
5342 hash_set<void *> *visited = (hash_set<void *> *) data;
5344 if (tree_node_can_be_shared (*tp))
5346 *walk_subtrees = false;
5347 return NULL;
5350 if (visited->add (*tp))
5351 return *tp;
5353 return NULL;
5356 /* Called via walk_gimple_stmt. Verify tree sharing. */
5358 static tree
5359 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
5361 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5362 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
5365 static bool eh_error_found;
5366 bool
5367 verify_eh_throw_stmt_node (gimple *const &stmt, const int &,
5368 hash_set<gimple *> *visited)
5370 if (!visited->contains (stmt))
5372 error ("dead statement in EH table");
5373 debug_gimple_stmt (stmt);
5374 eh_error_found = true;
5376 return true;
5379 /* Verify if the location LOCs block is in BLOCKS. */
5381 static bool
5382 verify_location (hash_set<tree> *blocks, location_t loc)
5384 tree block = LOCATION_BLOCK (loc);
5385 if (block != NULL_TREE
5386 && !blocks->contains (block))
5388 error ("location references block not in block tree");
5389 return true;
5391 if (block != NULL_TREE)
5392 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
5393 return false;
5396 /* Called via walk_tree. Verify that expressions have no blocks. */
5398 static tree
5399 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
5401 if (!EXPR_P (*tp))
5403 *walk_subtrees = false;
5404 return NULL;
5407 location_t loc = EXPR_LOCATION (*tp);
5408 if (LOCATION_BLOCK (loc) != NULL)
5409 return *tp;
5411 return NULL;
5414 /* Called via walk_tree. Verify locations of expressions. */
5416 static tree
5417 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
5419 hash_set<tree> *blocks = (hash_set<tree> *) data;
5420 tree t = *tp;
5422 /* ??? This doesn't really belong here but there's no good place to
5423 stick this remainder of old verify_expr. */
5424 /* ??? This barfs on debug stmts which contain binds to vars with
5425 different function context. */
5426 #if 0
5427 if (VAR_P (t)
5428 || TREE_CODE (t) == PARM_DECL
5429 || TREE_CODE (t) == RESULT_DECL)
5431 tree context = decl_function_context (t);
5432 if (context != cfun->decl
5433 && !SCOPE_FILE_SCOPE_P (context)
5434 && !TREE_STATIC (t)
5435 && !DECL_EXTERNAL (t))
5437 error ("local declaration from a different function");
5438 return t;
5441 #endif
/* Locations hidden inside DEBUG_EXPR and VALUE_EXPR trees must not
   reference any block; walk those trees with verify_expr_no_block. */
5443 if (VAR_P (t) && DECL_HAS_DEBUG_EXPR_P (t))
5445 tree x = DECL_DEBUG_EXPR (t);
5446 tree addr = walk_tree (&x, verify_expr_no_block, NULL, NULL);
5447 if (addr)
5448 return addr;
5450 if ((VAR_P (t)
5451 || TREE_CODE (t) == PARM_DECL
5452 || TREE_CODE (t) == RESULT_DECL)
5453 && DECL_HAS_VALUE_EXPR_P (t))
5455 tree x = DECL_VALUE_EXPR (t);
5456 tree addr = walk_tree (&x, verify_expr_no_block, NULL, NULL);
5457 if (addr)
5458 return addr;
/* Non-expressions carry no location; prune the walk. */
5461 if (!EXPR_P (t))
5463 *walk_subtrees = false;
5464 return NULL;
/* The expression's own location must reference a block from BLOCKS
   (or none at all); return T to the walker on failure. */
5467 location_t loc = EXPR_LOCATION (t);
5468 if (verify_location (blocks, loc))
5469 return t;
5471 return NULL;
5474 /* Called via walk_gimple_op. Verify locations of expressions. */
5476 static tree
5477 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
5479 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5480 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
5483 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
5485 static void
5486 collect_subblocks (hash_set<tree> *blocks, tree block)
5488 tree t;
5489 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
5491 blocks->add (t);
5492 collect_subblocks (blocks, t);
5496 /* Disable warnings about missing quoting in GCC diagnostics for
5497 the verification errors. Their format strings don't follow
5498 GCC diagnostic conventions and trigger an ICE in the end. */
5499 #if __GNUC__ >= 10
5500 # pragma GCC diagnostic push
5501 # pragma GCC diagnostic ignored "-Wformat-diag"
5502 #endif
5504 /* Verify the GIMPLE statements in the CFG of FN. */
5506 DEBUG_FUNCTION bool
5507 verify_gimple_in_cfg (struct function *fn, bool verify_nothrow, bool ice)
5509 basic_block bb;
5510 bool err = false;
5512 timevar_push (TV_TREE_STMT_VERIFY);
5513 hash_set<void *> visited;
5514 hash_set<gimple *> visited_throwing_stmts;
5516 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
5517 hash_set<tree> blocks;
5518 if (DECL_INITIAL (fn->decl))
5520 blocks.add (DECL_INITIAL (fn->decl));
5521 collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
/* Walk every basic block, verifying PHIs first, then statements. */
5524 FOR_EACH_BB_FN (bb, fn)
5526 gimple_stmt_iterator gsi;
5527 edge_iterator ei;
5528 edge e;
5530 for (gphi_iterator gpi = gsi_start_phis (bb);
5531 !gsi_end_p (gpi);
5532 gsi_next (&gpi))
5534 gphi *phi = gpi.phi ();
5535 bool err2 = false;
5536 unsigned i;
5538 if (gimple_bb (phi) != bb)
5540 error ("gimple_bb (phi) is set to a wrong basic block");
5541 err2 = true;
5544 err2 |= verify_gimple_phi (phi);
5546 /* Only PHI arguments have locations. */
5547 if (gimple_location (phi) != UNKNOWN_LOCATION)
5549 error ("PHI node with location");
5550 err2 = true;
/* Check each PHI argument for tree sharing and valid locations. */
5553 for (i = 0; i < gimple_phi_num_args (phi); i++)
5555 tree arg = gimple_phi_arg_def (phi, i);
5556 tree addr = walk_tree (&arg, verify_node_sharing_1,
5557 &visited, NULL);
5558 if (addr)
5560 error ("incorrect sharing of tree nodes");
5561 debug_generic_expr (addr);
5562 err2 |= true;
5564 location_t loc = gimple_phi_arg_location (phi, i);
5565 if (virtual_operand_p (gimple_phi_result (phi))
5566 && loc != UNKNOWN_LOCATION)
5568 error ("virtual PHI with argument locations");
5569 err2 = true;
5571 addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
5572 if (addr)
5574 debug_generic_expr (addr);
5575 err2 = true;
5577 err2 |= verify_location (&blocks, loc);
5580 if (err2)
5581 debug_gimple_stmt (phi);
5582 err |= err2;
/* Now verify the block's ordinary statements. */
5585 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5587 gimple *stmt = gsi_stmt (gsi);
5588 bool err2 = false;
5589 struct walk_stmt_info wi;
5590 tree addr;
5591 int lp_nr;
5593 if (gimple_bb (stmt) != bb)
5595 error ("gimple_bb (stmt) is set to a wrong basic block");
5596 err2 = true;
5599 err2 |= verify_gimple_stmt (stmt);
5600 err2 |= verify_location (&blocks, gimple_location (stmt));
5602 memset (&wi, 0, sizeof (wi));
5603 wi.info = (void *) &visited;
5604 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
5605 if (addr)
5607 error ("incorrect sharing of tree nodes");
5608 debug_generic_expr (addr);
5609 err2 |= true;
5612 memset (&wi, 0, sizeof (wi));
5613 wi.info = (void *) &blocks;
5614 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
5615 if (addr)
5617 debug_generic_expr (addr);
5618 err2 |= true;
5621 /* If the statement is marked as part of an EH region, then it is
5622 expected that the statement could throw. Verify that when we
5623 have optimizations that simplify statements such that we prove
5624 that they cannot throw, that we update other data structures
5625 to match. */
5626 lp_nr = lookup_stmt_eh_lp (stmt);
5627 if (lp_nr != 0)
5628 visited_throwing_stmts.add (stmt);
5629 if (lp_nr > 0)
5631 if (!stmt_could_throw_p (cfun, stmt))
5633 if (verify_nothrow)
5635 error ("statement marked for throw, but doesn%'t");
5636 err2 |= true;
5639 else if (!gsi_one_before_end_p (gsi))
5641 error ("statement marked for throw in middle of block");
5642 err2 |= true;
5646 if (err2)
5647 debug_gimple_stmt (stmt);
5648 err |= err2;
/* Edge goto_locus values must also reference known blocks. */
5651 FOR_EACH_EDGE (e, ei, bb->succs)
5652 if (e->goto_locus != UNKNOWN_LOCATION)
5653 err |= verify_location (&blocks, e->goto_locus);
/* Finally, every statement in the EH table must have been seen above;
   the traversal sets eh_error_found on stale entries. */
5656 hash_map<gimple *, int> *eh_table = get_eh_throw_stmt_table (cfun);
5657 eh_error_found = false;
5658 if (eh_table)
5659 eh_table->traverse<hash_set<gimple *> *, verify_eh_throw_stmt_node>
5660 (&visited_throwing_stmts);
5662 if (ice && (err || eh_error_found))
5663 internal_error ("verify_gimple failed");
5665 verify_histograms ();
5666 timevar_pop (TV_TREE_STMT_VERIFY);
5668 return (err || eh_error_found);
5672 /* Verifies that the flow information is OK. */
5674 static bool
5675 gimple_verify_flow_info (void)
5677 bool err = false;
5678 basic_block bb;
5679 gimple_stmt_iterator gsi;
5680 gimple *stmt;
5681 edge e;
5682 edge_iterator ei;
/* The artificial ENTRY and EXIT blocks must never carry statements
   or PHI nodes.  */
5684 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5685 || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5687 error ("ENTRY_BLOCK has IL associated with it");
5688 err = true;
5691 if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5692 || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5694 error ("EXIT_BLOCK has IL associated with it");
5695 err = true;
5698 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5699 if (e->flags & EDGE_FALLTHRU)
5701 error ("fallthru to exit from bb %d", e->src->index);
5702 err = true;
/* When the CFG claims a full profile, the entry/exit counts and the
   probability of the edge out of the entry block must be initialized.  */
5704 if (cfun->cfg->full_profile
5705 && !ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.initialized_p ())
5707 error ("entry block count not initialized");
5708 err = true;
5710 if (cfun->cfg->full_profile
5711 && !EXIT_BLOCK_PTR_FOR_FN (cfun)->count.initialized_p ())
5713 error ("exit block count not initialized");
5714 err = true;
5716 if (cfun->cfg->full_profile
5717 && !single_succ_edge
5718 (ENTRY_BLOCK_PTR_FOR_FN (cfun))->probability.initialized_p ())
5720 error ("probability of edge from entry block not initialized");
5721 err = true;
/* Verify each real basic block: its profile data, its leading labels,
   the absence of control flow in the middle of the block, and the
   outgoing edges implied by its last statement.  */
5725 FOR_EACH_BB_FN (bb, cfun)
5727 bool found_ctrl_stmt = false;
5729 stmt = NULL;
5731 if (cfun->cfg->full_profile)
5733 if (!bb->count.initialized_p ())
5735 error ("count of bb %d not initialized", bb->index);
5736 err = true;
5738 FOR_EACH_EDGE (e, ei, bb->succs)
5739 if (!e->probability.initialized_p ())
5741 error ("probability of edge %d->%d not initialized",
5742 bb->index, e->dest->index);
5743 err = true;
5747 /* Skip labels on the start of basic block. */
5748 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5750 tree label;
5751 gimple *prev_stmt = stmt;
5753 stmt = gsi_stmt (gsi);
5755 if (gimple_code (stmt) != GIMPLE_LABEL)
5756 break;
/* Nonlocal and EH landing pad labels must be first; every label
   must map back to this block and belong to this function.  */
5758 label = gimple_label_label (as_a <glabel *> (stmt));
5759 if (prev_stmt && DECL_NONLOCAL (label))
5761 error ("nonlocal label %qD is not first in a sequence "
5762 "of labels in bb %d", label, bb->index);
5763 err = true;
5766 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
5768 error ("EH landing pad label %qD is not first in a sequence "
5769 "of labels in bb %d", label, bb->index);
5770 err = true;
5773 if (label_to_block (cfun, label) != bb)
5775 error ("label %qD to block does not match in bb %d",
5776 label, bb->index);
5777 err = true;
5780 if (decl_function_context (label) != current_function_decl)
5782 error ("label %qD has incorrect context in bb %d",
5783 label, bb->index);
5784 err = true;
5788 /* Verify that body of basic block BB is free of control flow. */
5789 bool seen_nondebug_stmt = false;
5790 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5792 gimple *stmt = gsi_stmt (gsi);
5794 /* Do NOT disregard debug stmts after found_ctrl_stmt. */
5795 if (found_ctrl_stmt)
5797 error ("control flow in the middle of basic block %d",
5798 bb->index);
5799 err = true;
5802 if (stmt_ends_bb_p (stmt))
5803 found_ctrl_stmt = true;
5805 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
5807 error ("label %qD in the middle of basic block %d",
5808 gimple_label_label (label_stmt), bb->index);
5809 err = true;
5812 /* Check that no statements appear between a returns_twice call
5813 and its associated abnormal edge. */
5814 if (gimple_code (stmt) == GIMPLE_CALL
5815 && gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
5817 const char *misplaced = NULL;
5818 /* TM is an exception: it points abnormal edges just after the
5819 call that starts a transaction, i.e. it must end the BB. */
5820 if (gimple_call_builtin_p (stmt, BUILT_IN_TM_START))
5822 if (single_succ_p (bb)
5823 && bb_has_abnormal_pred (single_succ (bb))
5824 && !gsi_one_nondebug_before_end_p (gsi))
5825 misplaced = "not last";
5827 else
5829 if (seen_nondebug_stmt
5830 && bb_has_abnormal_pred (bb))
5831 misplaced = "not first";
5833 if (misplaced)
5835 error ("returns_twice call is %s in basic block %d",
5836 misplaced, bb->index);
5837 print_gimple_stmt (stderr, stmt, 0, TDF_SLIM);
5838 err = true;
5841 if (!is_gimple_debug (stmt))
5842 seen_nondebug_stmt = true;
/* Finally inspect the last non-debug statement of BB and the edge
   flags it implies.  */
5845 gsi = gsi_last_nondebug_bb (bb);
5846 if (gsi_end_p (gsi))
5847 continue;
5849 stmt = gsi_stmt (gsi);
5851 if (gimple_code (stmt) == GIMPLE_LABEL)
5852 continue;
5854 if (verify_eh_edges (stmt))
5855 err = true;
5857 if (is_ctrl_stmt (stmt))
5859 FOR_EACH_EDGE (e, ei, bb->succs)
5860 if (e->flags & EDGE_FALLTHRU)
5862 error ("fallthru edge after a control statement in bb %d",
5863 bb->index);
5864 err = true;
5868 if (gimple_code (stmt) != GIMPLE_COND)
5870 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
5871 after anything else but if statement. */
5872 FOR_EACH_EDGE (e, ei, bb->succs)
5873 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
5875 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5876 bb->index);
5877 err = true;
/* Per-statement-kind checks on the block terminator.  */
5881 switch (gimple_code (stmt))
5883 case GIMPLE_COND:
5885 edge true_edge;
5886 edge false_edge;
5888 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5890 if (!true_edge
5891 || !false_edge
5892 || !(true_edge->flags & EDGE_TRUE_VALUE)
5893 || !(false_edge->flags & EDGE_FALSE_VALUE)
5894 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5895 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5896 || EDGE_COUNT (bb->succs) >= 3)
5898 error ("wrong outgoing edge flags at end of bb %d",
5899 bb->index);
5900 err = true;
5903 break;
5905 case GIMPLE_GOTO:
5906 if (simple_goto_p (stmt))
5908 error ("explicit goto at end of bb %d", bb->index);
5909 err = true;
5911 else
5913 /* FIXME. We should double check that the labels in the
5914 destination blocks have their address taken. */
5915 FOR_EACH_EDGE (e, ei, bb->succs)
5916 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5917 | EDGE_FALSE_VALUE))
5918 || !(e->flags & EDGE_ABNORMAL))
5920 error ("wrong outgoing edge flags at end of bb %d",
5921 bb->index);
5922 err = true;
5925 break;
5927 case GIMPLE_CALL:
5928 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5929 break;
5930 /* fallthru */
5931 case GIMPLE_RETURN:
5932 if (!single_succ_p (bb)
5933 || (single_succ_edge (bb)->flags
5934 & (EDGE_FALLTHRU | EDGE_ABNORMAL
5935 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5937 error ("wrong outgoing edge flags at end of bb %d", bb->index);
5938 err = true;
5940 if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5942 error ("return edge does not point to exit in bb %d",
5943 bb->index);
5944 err = true;
5946 break;
5948 case GIMPLE_SWITCH:
5950 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5951 tree prev;
5952 edge e;
5953 size_t i, n;
5955 n = gimple_switch_num_labels (switch_stmt);
5957 /* Mark all the destination basic blocks. */
5958 for (i = 0; i < n; ++i)
5960 basic_block label_bb = gimple_switch_label_bb (cfun, switch_stmt, i);
5961 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5962 label_bb->aux = (void *)1;
5965 /* Verify that the case labels are sorted. */
5966 prev = gimple_switch_label (switch_stmt, 0);
5967 for (i = 1; i < n; ++i)
5969 tree c = gimple_switch_label (switch_stmt, i);
5970 if (!CASE_LOW (c))
5972 error ("found default case not at the start of "
5973 "case vector");
5974 err = true;
5975 continue;
5977 if (CASE_LOW (prev)
5978 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5980 error ("case labels not sorted: ");
5981 print_generic_expr (stderr, prev);
5982 fprintf (stderr," is greater than ");
5983 print_generic_expr (stderr, c);
5984 fprintf (stderr," but comes before it.\n");
5985 err = true;
5987 prev = c;
5989 /* VRP will remove the default case if it can prove it will
5990 never be executed. So do not verify there always exists
5991 a default case here. */
5993 FOR_EACH_EDGE (e, ei, bb->succs)
5995 if (!e->dest->aux)
5997 error ("extra outgoing edge %d->%d",
5998 bb->index, e->dest->index);
5999 err = true;
6002 e->dest->aux = (void *)2;
6003 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
6004 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
6006 error ("wrong outgoing edge flags at end of bb %d",
6007 bb->index);
6008 err = true;
6012 /* Check that we have all of them. */
6013 for (i = 0; i < n; ++i)
6015 basic_block label_bb = gimple_switch_label_bb (cfun,
6016 switch_stmt, i);
/* AUX == 2 means the block was reached by an actual edge above.  */
6018 if (label_bb->aux != (void *)2)
6020 error ("missing edge %i->%i", bb->index, label_bb->index);
6021 err = true;
/* Reset the AUX scratch markers.  */
6025 FOR_EACH_EDGE (e, ei, bb->succs)
6026 e->dest->aux = (void *)0;
6028 break;
6030 case GIMPLE_EH_DISPATCH:
6031 if (verify_eh_dispatch_edge (as_a <geh_dispatch *> (stmt)))
6032 err = true;
6033 break;
6035 default:
6036 break;
6040 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
6041 verify_dominators (CDI_DOMINATORS);
6043 return err;
6046 #if __GNUC__ >= 10
6047 # pragma GCC diagnostic pop
6048 #endif
6050 /* Updates phi nodes after creating a forwarder block joined
6051 by edge FALLTHRU. */
6053 static void
6054 gimple_make_forwarder_block (edge fallthru)
6056 edge e;
6057 edge_iterator ei;
6058 basic_block dummy, bb;
6059 tree var;
6060 gphi_iterator gsi;
6061 bool forward_location_p;
/* DUMMY is the source of the forwarder edge, BB its destination.  */
6063 dummy = fallthru->src;
6064 bb = fallthru->dest;
/* With a single predecessor BB needs no PHI nodes at all.  */
6066 if (single_pred_p (bb))
6067 return;
6069 /* We can forward location info if we have only one predecessor. */
6070 forward_location_p = single_pred_p (dummy);
6072 /* If we redirected a branch we must create new PHI nodes at the
6073 start of BB. */
/* Each PHI in DUMMY gets a fresh SSA result; the original result is
   recreated as a PHI in BB, fed from DUMMY's PHI over FALLTHRU.  */
6074 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
6076 gphi *phi, *new_phi;
6078 phi = gsi.phi ();
6079 var = gimple_phi_result (phi);
6080 new_phi = create_phi_node (var, bb);
6081 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
6082 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
6083 forward_location_p
6084 ? gimple_phi_arg_location (phi, 0) : UNKNOWN_LOCATION);
6087 /* Add the arguments we have stored on edges. */
6088 FOR_EACH_EDGE (e, ei, bb->preds)
6090 if (e == fallthru)
6091 continue;
6093 flush_pending_stmts (e);
6098 /* Return a non-special label in the head of basic block BLOCK.
6099 Create one if it doesn't exist. */
6101 tree
6102 gimple_block_label (basic_block bb)
6104 gimple_stmt_iterator i, s = gsi_start_bb (bb);
6105 bool first = true;
6106 tree label;
6107 glabel *stmt;
6109 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
6111 stmt = dyn_cast <glabel *> (gsi_stmt (i));
6112 if (!stmt)
6113 break;
6114 label = gimple_label_label (stmt);
6115 if (!DECL_NONLOCAL (label))
6117 if (!first)
6118 gsi_move_before (&i, &s);
6119 return label;
6123 label = create_artificial_label (UNKNOWN_LOCATION);
6124 stmt = gimple_build_label (label);
6125 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
6126 return label;
6130 /* Attempt to perform edge redirection by replacing a possibly complex
6131 jump instruction by a goto or by removing the jump completely.
6132 This can apply only if all edges now point to the same block. The
6133 parameters and return values are equivalent to
6134 redirect_edge_and_branch. */
6136 static edge
6137 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
6139 basic_block src = e->src;
6140 gimple_stmt_iterator i;
6141 gimple *stmt;
6143 /* We can replace or remove a complex jump only when we have exactly
6144 two edges. */
6145 if (EDGE_COUNT (src->succs) != 2
6146 /* Verify that all targets will be TARGET. Specifically, the
6147 edge that is not E must also go to TARGET. */
/* EDGE_SUCC (src, 0) == e is 1 exactly when E is edge 0, so the
   expression below indexes the "other" successor edge of SRC.  */
6148 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
6149 return NULL;
6151 i = gsi_last_bb (src);
6152 if (gsi_end_p (i))
6153 return NULL;
6155 stmt = gsi_stmt (i);
/* Only a trailing GIMPLE_COND or GIMPLE_SWITCH can be dropped; the
   remaining single edge then becomes a plain fallthru.  */
6157 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
6159 gsi_remove (&i, true);
6160 e = ssa_redirect_edge (e, target);
6161 e->flags = EDGE_FALLTHRU;
6162 return e;
6165 return NULL;
6169 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
6170 edge representing the redirected branch. */
6172 static edge
6173 gimple_redirect_edge_and_branch (edge e, basic_block dest)
6175 basic_block bb = e->src;
6176 gimple_stmt_iterator gsi;
6177 edge ret;
6178 gimple *stmt;
/* Abnormal edges encode implicit control flow and cannot be
   redirected by rewriting a branch.  */
6180 if (e->flags & EDGE_ABNORMAL)
6181 return NULL;
6183 if (e->dest == dest)
6184 return NULL;
6186 if (e->flags & EDGE_EH)
6187 return redirect_eh_edge (e, dest);
6189 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
6191 ret = gimple_try_redirect_by_replacing_jump (e, dest);
6192 if (ret)
6193 return ret;
/* Find the control statement that ends BB, if any, and adjust it so
   it transfers to DEST.  */
6196 gsi = gsi_last_nondebug_bb (bb);
6197 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
6199 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
6201 case GIMPLE_COND:
6202 /* For COND_EXPR, we only need to redirect the edge. */
6203 break;
6205 case GIMPLE_GOTO:
6206 /* No non-abnormal edges should lead from a non-simple goto, and
6207 simple ones should be represented implicitly. */
6208 gcc_unreachable ();
6210 case GIMPLE_SWITCH:
6212 gswitch *switch_stmt = as_a <gswitch *> (stmt);
6213 tree label = gimple_block_label (dest);
6214 tree cases = get_cases_for_edge (e, switch_stmt);
6216 /* If we have a list of cases associated with E, then use it
6217 as it's a lot faster than walking the entire case vector. */
6218 if (cases)
6220 edge e2 = find_edge (e->src, dest);
6221 tree last, first;
6223 first = cases;
6224 while (cases)
6226 last = cases;
6227 CASE_LABEL (cases) = label;
6228 cases = CASE_CHAIN (cases);
6231 /* If there was already an edge in the CFG, then we need
6232 to move all the cases associated with E to E2. */
6233 if (e2)
6235 tree cases2 = get_cases_for_edge (e2, switch_stmt);
6237 CASE_CHAIN (last) = CASE_CHAIN (cases2);
6238 CASE_CHAIN (cases2) = first;
6240 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
6242 else
/* No cached case list: walk the whole case vector and retarget
   every label that pointed at E's old destination.  */
6244 size_t i, n = gimple_switch_num_labels (switch_stmt);
6246 for (i = 0; i < n; i++)
6248 tree elt = gimple_switch_label (switch_stmt, i);
6249 if (label_to_block (cfun, CASE_LABEL (elt)) == e->dest)
6250 CASE_LABEL (elt) = label;
6254 break;
6256 case GIMPLE_ASM:
6258 gasm *asm_stmt = as_a <gasm *> (stmt);
6259 int i, n = gimple_asm_nlabels (asm_stmt);
6260 tree label = NULL;
6262 for (i = 0; i < n; ++i)
6264 tree cons = gimple_asm_label_op (asm_stmt, i);
6265 if (label_to_block (cfun, TREE_VALUE (cons)) == e->dest)
6267 if (!label)
6268 label = gimple_block_label (dest);
6269 TREE_VALUE (cons) = label;
6273 /* If we didn't find any label matching the former edge in the
6274 asm labels, we must be redirecting the fallthrough
6275 edge. */
6276 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
6278 break;
/* A return can simply be deleted; the edge then falls through
   to DEST.  */
6280 case GIMPLE_RETURN:
6281 gsi_remove (&gsi, true);
6282 e->flags |= EDGE_FALLTHRU;
6283 break;
6285 case GIMPLE_OMP_RETURN:
6286 case GIMPLE_OMP_CONTINUE:
6287 case GIMPLE_OMP_SECTIONS_SWITCH:
6288 case GIMPLE_OMP_FOR:
6289 /* The edges from OMP constructs can be simply redirected. */
6290 break;
6292 case GIMPLE_EH_DISPATCH:
6293 if (!(e->flags & EDGE_FALLTHRU))
6294 redirect_eh_dispatch_edge (as_a <geh_dispatch *> (stmt), e, dest);
6295 break;
6297 case GIMPLE_TRANSACTION:
/* Pick the transaction label that corresponds to E's role.  */
6298 if (e->flags & EDGE_TM_ABORT)
6299 gimple_transaction_set_label_over (as_a <gtransaction *> (stmt),
6300 gimple_block_label (dest));
6301 else if (e->flags & EDGE_TM_UNINSTRUMENTED)
6302 gimple_transaction_set_label_uninst (as_a <gtransaction *> (stmt),
6303 gimple_block_label (dest));
6304 else
6305 gimple_transaction_set_label_norm (as_a <gtransaction *> (stmt),
6306 gimple_block_label (dest));
6307 break;
6309 default:
6310 /* Otherwise it must be a fallthru edge, and we don't need to
6311 do anything besides redirecting it. */
6312 gcc_assert (e->flags & EDGE_FALLTHRU);
6313 break;
6316 /* Update/insert PHI nodes as necessary. */
6318 /* Now update the edges in the CFG. */
6319 e = ssa_redirect_edge (e, dest);
6321 return e;
6324 /* Returns true if it is possible to remove edge E by redirecting
6325 it to the destination of the other edge from E->src. */
6327 static bool
6328 gimple_can_remove_branch_p (const_edge e)
6330 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
6331 return false;
6333 return true;
6336 /* Simple wrapper, as we can always redirect fallthru edges. */
6338 static basic_block
6339 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
6341 e = gimple_redirect_edge_and_branch (e, dest);
6342 gcc_assert (e);
6344 return NULL;
6348 /* Splits basic block BB after statement STMT (but at least after the
6349 labels). If STMT is NULL, BB is split just after the labels. */
6351 static basic_block
6352 gimple_split_block (basic_block bb, void *stmt)
6354 gimple_stmt_iterator gsi;
6355 gimple_stmt_iterator gsi_tgt;
6356 gimple_seq list;
6357 basic_block new_bb;
6358 edge e;
6359 edge_iterator ei;
6361 new_bb = create_empty_bb (bb);
6363 /* Redirect the outgoing edges. */
/* Only the source pointer of each edge needs updating; the edge
   vector itself is transferred wholesale to NEW_BB.  */
6364 new_bb->succs = bb->succs;
6365 bb->succs = NULL;
6366 FOR_EACH_EDGE (e, ei, new_bb->succs)
6367 e->src = new_bb;
6369 /* Get a stmt iterator pointing to the first stmt to move. */
6370 if (!stmt || gimple_code ((gimple *) stmt) == GIMPLE_LABEL)
6371 gsi = gsi_after_labels (bb);
6372 else
6374 gsi = gsi_for_stmt ((gimple *) stmt);
6375 gsi_next (&gsi);
6378 /* Move everything from GSI to the new basic block. */
6379 if (gsi_end_p (gsi))
6380 return new_bb;
6382 /* Split the statement list - avoid re-creating new containers as this
6383 brings ugly quadratic memory consumption in the inliner.
6384 (We are still quadratic since we need to update stmt BB pointers,
6385 sadly.) */
6386 gsi_split_seq_before (&gsi, &list);
6387 set_bb_seq (new_bb, list);
6388 for (gsi_tgt = gsi_start (list);
6389 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
6390 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
6392 return new_bb;
6396 /* Moves basic block BB after block AFTER. */
6398 static bool
6399 gimple_move_block_after (basic_block bb, basic_block after)
6401 if (bb->prev_bb == after)
6402 return true;
6404 unlink_block (bb);
6405 link_block (bb, after);
6407 return true;
6411 /* Return TRUE if block BB has no executable statements, otherwise return
6412 FALSE. */
6414 static bool
6415 gimple_empty_block_p (basic_block bb)
6417 /* BB must have no executable statements. */
6418 gimple_stmt_iterator gsi = gsi_after_labels (bb);
6419 if (phi_nodes (bb))
6420 return false;
6421 while (!gsi_end_p (gsi))
6423 gimple *stmt = gsi_stmt (gsi);
6424 if (is_gimple_debug (stmt))
6426 else if (gimple_code (stmt) == GIMPLE_NOP
6427 || gimple_code (stmt) == GIMPLE_PREDICT)
6429 else
6430 return false;
6431 gsi_next (&gsi);
6433 return true;
6437 /* Split a basic block if it ends with a conditional branch and if the
6438 other part of the block is not empty. */
6440 static basic_block
6441 gimple_split_block_before_cond_jump (basic_block bb)
6443 gimple *last, *split_point;
6444 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
6445 if (gsi_end_p (gsi))
6446 return NULL;
6447 last = gsi_stmt (gsi);
/* Only split before a trailing two-way or multiway branch.  */
6448 if (gimple_code (last) != GIMPLE_COND
6449 && gimple_code (last) != GIMPLE_SWITCH)
6450 return NULL;
6451 gsi_prev (&gsi);
/* NOTE(review): if the branch is the first statement of BB, GSI is now
   the end iterator and SPLIT_POINT is presumably NULL, so split_block
   splits just after the labels — confirm gsi_stmt's end-iterator
   behavior.  */
6452 split_point = gsi_stmt (gsi);
6453 return split_block (bb, split_point)->dest;
6457 /* Return true if basic_block can be duplicated. */
6459 static bool
6460 gimple_can_duplicate_bb_p (const_basic_block bb)
6462 gimple *last = last_nondebug_stmt (CONST_CAST_BB (bb));
6464 /* Do checks that can only fail for the last stmt, to minimize the work in the
6465 stmt loop. */
6466 if (last) {
6467 /* A transaction is a single entry multiple exit region. It
6468 must be duplicated in its entirety or not at all. */
6469 if (gimple_code (last) == GIMPLE_TRANSACTION)
6470 return false;
6472 /* An IFN_UNIQUE call must be duplicated as part of its group,
6473 or not at all. */
6474 if (is_gimple_call (last)
6475 && gimple_call_internal_p (last)
6476 && gimple_call_internal_unique_p (last))
6477 return false;
6480 for (gimple_stmt_iterator gsi = gsi_start_bb (CONST_CAST_BB (bb));
6481 !gsi_end_p (gsi); gsi_next (&gsi))
6483 gimple *g = gsi_stmt (gsi);
6485 /* Prohibit duplication of returns_twice calls, otherwise associated
6486 abnormal edges also need to be duplicated properly.
6487 An IFN_GOMP_SIMT_ENTER_ALLOC/IFN_GOMP_SIMT_EXIT call must be
6488 duplicated as part of its group, or not at all.
6489 The IFN_GOMP_SIMT_VOTE_ANY and IFN_GOMP_SIMT_XCHG_* are part of such a
6490 group, so the same holds there. */
6491 if (is_gimple_call (g)
6492 && (gimple_call_flags (g) & ECF_RETURNS_TWICE
6493 || gimple_call_internal_p (g, IFN_GOMP_SIMT_ENTER_ALLOC)
6494 || gimple_call_internal_p (g, IFN_GOMP_SIMT_EXIT)
6495 || gimple_call_internal_p (g, IFN_GOMP_SIMT_VOTE_ANY)
6496 || gimple_call_internal_p (g, IFN_GOMP_SIMT_XCHG_BFLY)
6497 || gimple_call_internal_p (g, IFN_GOMP_SIMT_XCHG_IDX)))
6498 return false;
6501 return true;
6504 /* Create a duplicate of the basic block BB. NOTE: This does not
6505 preserve SSA form. */
6507 static basic_block
6508 gimple_duplicate_bb (basic_block bb, copy_bb_data *id)
6510 basic_block new_bb;
6511 gimple_stmt_iterator gsi_tgt;
/* The copy is placed just before the exit block in the chain.  */
6513 new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
6515 /* Copy the PHI nodes. We ignore PHI node arguments here because
6516 the incoming edges have not been setup yet. */
6517 for (gphi_iterator gpi = gsi_start_phis (bb);
6518 !gsi_end_p (gpi);
6519 gsi_next (&gpi))
6521 gphi *phi, *copy;
6522 phi = gpi.phi ();
6523 copy = create_phi_node (NULL_TREE, new_bb);
6524 create_new_def_for (gimple_phi_result (phi), copy,
6525 gimple_phi_result_ptr (copy));
6526 gimple_set_uid (copy, gimple_uid (phi));
6529 gsi_tgt = gsi_start_bb (new_bb);
6530 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
6531 !gsi_end_p (gsi);
6532 gsi_next (&gsi))
6534 def_operand_p def_p;
6535 ssa_op_iter op_iter;
6536 tree lhs;
6537 gimple *stmt, *copy;
6539 stmt = gsi_stmt (gsi);
/* Labels are not copied; they identify the original block.  */
6540 if (gimple_code (stmt) == GIMPLE_LABEL)
6541 continue;
6543 /* Don't duplicate label debug stmts. */
6544 if (gimple_debug_bind_p (stmt)
6545 && TREE_CODE (gimple_debug_bind_get_var (stmt))
6546 == LABEL_DECL)
6547 continue;
6549 /* Create a new copy of STMT and duplicate STMT's virtual
6550 operands. */
6551 copy = gimple_copy (stmt);
6552 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
6554 maybe_duplicate_eh_stmt (copy, stmt);
6555 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
6557 /* When copying around a stmt writing into a local non-user
6558 aggregate, make sure it won't share stack slot with other
6559 vars. */
6560 lhs = gimple_get_lhs (stmt);
6561 if (lhs && TREE_CODE (lhs) != SSA_NAME)
6563 tree base = get_base_address (lhs);
6564 if (base
6565 && (VAR_P (base) || TREE_CODE (base) == RESULT_DECL)
6566 && DECL_IGNORED_P (base)
6567 && !TREE_STATIC (base)
6568 && !DECL_EXTERNAL (base)
6569 && (!VAR_P (base) || !DECL_HAS_VALUE_EXPR_P (base)))
6570 DECL_NONSHAREABLE (base) = 1;
6573 /* If requested remap dependence info of cliques brought in
6574 via inlining. */
6575 if (id)
6576 for (unsigned i = 0; i < gimple_num_ops (copy); ++i)
6578 tree op = gimple_op (copy, i);
6579 if (!op)
6580 continue;
/* Strip wrappers and handled components to reach the base
   memory reference that carries the dependence clique.  */
6581 if (TREE_CODE (op) == ADDR_EXPR
6582 || TREE_CODE (op) == WITH_SIZE_EXPR)
6583 op = TREE_OPERAND (op, 0);
6584 while (handled_component_p (op))
6585 op = TREE_OPERAND (op, 0);
6586 if ((TREE_CODE (op) == MEM_REF
6587 || TREE_CODE (op) == TARGET_MEM_REF)
6588 && MR_DEPENDENCE_CLIQUE (op) > 1
6589 && MR_DEPENDENCE_CLIQUE (op) != bb->loop_father->owned_clique)
6591 if (!id->dependence_map)
6592 id->dependence_map = new hash_map<dependence_hash,
6593 unsigned short>;
6594 bool existed;
6595 unsigned short &newc = id->dependence_map->get_or_insert
6596 (MR_DEPENDENCE_CLIQUE (op), &existed);
6597 if (!existed)
6599 gcc_assert (MR_DEPENDENCE_CLIQUE (op) <= cfun->last_clique);
6600 newc = get_new_clique (cfun);
6602 MR_DEPENDENCE_CLIQUE (op) = newc;
6606 /* Create new names for all the definitions created by COPY and
6607 add replacement mappings for each new name. */
6608 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
6609 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
6612 return new_bb;
6615 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
6617 static void
6618 add_phi_args_after_copy_edge (edge e_copy)
6620 basic_block bb, bb_copy = e_copy->src, dest;
6621 edge e;
6622 edge_iterator ei;
6623 gphi *phi, *phi_copy;
6624 tree def;
6625 gphi_iterator psi, psi_copy;
/* No PHIs at the destination means nothing to fill in.  */
6627 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
6628 return;
/* Map the copied source and destination blocks back to their
   originals (BB_DUPLICATED marks blocks that are copies).  */
6630 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
6632 if (e_copy->dest->flags & BB_DUPLICATED)
6633 dest = get_bb_original (e_copy->dest);
6634 else
6635 dest = e_copy->dest;
6637 e = find_edge (bb, dest);
6638 if (!e)
6640 /* During loop unrolling the target of the latch edge is copied.
6641 In this case we are not looking for edge to dest, but to
6642 duplicated block whose original was dest. */
6643 FOR_EACH_EDGE (e, ei, bb->succs)
6645 if ((e->dest->flags & BB_DUPLICATED)
6646 && get_bb_original (e->dest) == dest)
6647 break;
6650 gcc_assert (e != NULL);
/* Walk the PHIs of the original and copied destinations in lockstep
   and transfer each argument from edge E to E_COPY.  */
6653 for (psi = gsi_start_phis (e->dest),
6654 psi_copy = gsi_start_phis (e_copy->dest);
6655 !gsi_end_p (psi);
6656 gsi_next (&psi), gsi_next (&psi_copy))
6658 phi = psi.phi ();
6659 phi_copy = psi_copy.phi ();
6660 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
6661 add_phi_arg (phi_copy, def, e_copy,
6662 gimple_phi_arg_location_from_edge (phi, e));
6667 /* Basic block BB_COPY was created by code duplication. Add phi node
6668 arguments for edges going out of BB_COPY. The blocks that were
6669 duplicated have BB_DUPLICATED set. */
6671 void
6672 add_phi_args_after_copy_bb (basic_block bb_copy)
6674 edge e_copy;
6675 edge_iterator ei;
6677 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
6679 add_phi_args_after_copy_edge (e_copy);
6683 /* Blocks in REGION_COPY array of length N_REGION were created by
6684 duplication of basic blocks. Add phi node arguments for edges
6685 going from these blocks. If E_COPY is not NULL, also add
6686 phi node arguments for its destination.*/
6688 void
6689 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
6690 edge e_copy)
6692 unsigned i;
6694 for (i = 0; i < n_region; i++)
6695 region_copy[i]->flags |= BB_DUPLICATED;
6697 for (i = 0; i < n_region; i++)
6698 add_phi_args_after_copy_bb (region_copy[i]);
6699 if (e_copy)
6700 add_phi_args_after_copy_edge (e_copy);
6702 for (i = 0; i < n_region; i++)
6703 region_copy[i]->flags &= ~BB_DUPLICATED;
6706 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
6707 important exit edge EXIT. By important we mean that no SSA name defined
6708 inside region is live over the other exit edges of the region. All entry
6709 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
6710 to the duplicate of the region. Dominance and loop information is
6711 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
6712 UPDATE_DOMINANCE is false then we assume that the caller will update the
6713 dominance information after calling this function. The new basic
6714 blocks are stored to REGION_COPY in the same order as they had in REGION,
6715 provided that REGION_COPY is not NULL.
6716 The function returns false if it is unable to copy the region,
6717 true otherwise.
6719 It is callers responsibility to update profile. */
6721 bool
6722 gimple_duplicate_seme_region (edge entry, edge exit,
6723 basic_block *region, unsigned n_region,
6724 basic_block *region_copy,
6725 bool update_dominance)
6727 unsigned i;
6728 bool free_region_copy = false, copying_header = false;
6729 class loop *loop = entry->dest->loop_father;
6730 edge exit_copy;
6731 edge redirected;
6733 if (!can_copy_bbs_p (region, n_region))
6734 return false;
6736 /* Some sanity checking. Note that we do not check for all possible
6737 missuses of the functions. I.e. if you ask to copy something weird,
6738 it will work, but the state of structures probably will not be
6739 correct. */
6740 for (i = 0; i < n_region; i++)
6742 /* We do not handle subloops, i.e. all the blocks must belong to the
6743 same loop. */
6744 if (region[i]->loop_father != loop)
6745 return false;
6747 if (region[i] != entry->dest
6748 && region[i] == loop->header)
6749 return false;
6752 /* In case the function is used for loop header copying (which is the primary
6753 use), ensure that EXIT and its copy will be new latch and entry edges. */
6754 if (loop->header == entry->dest)
6756 copying_header = true;
6758 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
6759 return false;
6761 for (i = 0; i < n_region; i++)
6762 if (region[i] != exit->src
6763 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
6764 return false;
6767 initialize_original_copy_tables ();
/* When copying the header, the copy belongs to the outer loop; the
   original region becomes the new loop body.  */
6769 if (copying_header)
6770 set_loop_copy (loop, loop_outer (loop));
6771 else
6772 set_loop_copy (loop, loop);
6774 if (!region_copy)
6776 region_copy = XNEWVEC (basic_block, n_region);
6777 free_region_copy = true;
6780 /* Record blocks outside the region that are dominated by something
6781 inside. */
6782 auto_vec<basic_block> doms;
6783 if (update_dominance)
6784 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6786 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
6787 split_edge_bb_loc (entry), update_dominance);
6789 if (copying_header)
6791 loop->header = exit->dest;
6792 loop->latch = exit->src;
6795 /* Redirect the entry and add the phi node arguments. */
6796 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
6797 gcc_assert (redirected != NULL);
6798 flush_pending_stmts (entry);
6800 /* Concerning updating of dominators: We must recount dominators
6801 for entry block and its copy. Anything that is outside of the
6802 region, but was dominated by something inside needs recounting as
6803 well. */
6804 if (update_dominance)
6806 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
6807 doms.safe_push (get_bb_original (entry->dest));
6808 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6811 /* Add the other PHI node arguments. */
6812 add_phi_args_after_copy (region_copy, n_region, NULL);
6814 if (free_region_copy)
6815 free (region_copy);
6817 free_original_copy_tables ();
6818 return true;
6821 /* Checks if BB is part of the region defined by N_REGION BBS. */
6822 static bool
6823 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
6825 unsigned int n;
6827 for (n = 0; n < n_region; n++)
6829 if (bb == bbs[n])
6830 return true;
6832 return false;
6836 /* For each PHI in BB, copy the argument associated with SRC_E to TGT_E.
6837 Assuming the argument exists, just does not have a value. */
6839 void
6840 copy_phi_arg_into_existing_phi (edge src_e, edge tgt_e)
6842 int src_idx = src_e->dest_idx;
6843 int tgt_idx = tgt_e->dest_idx;
6845 /* Iterate over each PHI in e->dest. */
6846 for (gphi_iterator gsi = gsi_start_phis (src_e->dest),
6847 gsi2 = gsi_start_phis (tgt_e->dest);
6848 !gsi_end_p (gsi);
6849 gsi_next (&gsi), gsi_next (&gsi2))
6851 gphi *src_phi = gsi.phi ();
6852 gphi *dest_phi = gsi2.phi ();
6853 tree val = gimple_phi_arg_def (src_phi, src_idx);
6854 location_t locus = gimple_phi_arg_location (src_phi, src_idx);
6856 SET_PHI_ARG_DEF (dest_phi, tgt_idx, val);
6857 gimple_phi_arg_set_location (dest_phi, tgt_idx, locus);
6861 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
6862 are stored to REGION_COPY in the same order in that they appear
6863 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
6864 the region, EXIT an exit from it. The condition guarding EXIT
6865 is moved to ENTRY. Returns true if duplication succeeds, false
6866 otherwise.
6868 For example,
6870 some_code;
6871 if (cond)
6873 else
6876 is transformed to
6878 if (cond)
6880 some_code;
6883 else
6885 some_code;
bool
gimple_duplicate_sese_tail (edge entry, edge exit,
			    basic_block *region, unsigned n_region,
			    basic_block *region_copy)
{
  unsigned i;
  bool free_region_copy = false;
  class loop *loop = exit->dest->loop_father;
  class loop *orig_loop = entry->dest->loop_father;
  basic_block switch_bb, entry_bb, nentry_bb;
  profile_count total_count = profile_count::uninitialized (),
		exit_count = profile_count::uninitialized ();
  edge exits[2], nexits[2], e;
  gimple_stmt_iterator gsi;
  edge sorig, snew;
  basic_block exit_bb;
  class loop *target, *aloop, *cloop;

  /* EXIT->src must end in a two-way conditional; remember both outgoing
     edges, with EXIT itself first.  */
  gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
  exits[0] = exit;
  exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);

  if (!can_copy_bbs_p (region, n_region))
    return false;

  initialize_original_copy_tables ();
  set_loop_copy (orig_loop, loop);

  /* Duplicate the loop tree for any loops fully contained in REGION,
     attaching the copies under TARGET (= LOOP).  */
  target = loop;
  for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
    {
      if (bb_part_of_region_p (aloop->header, region, n_region))
	{
	  cloop = duplicate_loop (aloop, target);
	  duplicate_subloops (aloop, cloop);
	}
    }

  if (!region_copy)
    {
      region_copy = XNEWVEC (basic_block, n_region);
      free_region_copy = true;
    }

  gcc_assert (!need_ssa_update_p (cfun));

  /* Record blocks outside the region that are dominated by something
     inside.  */
  auto_vec<basic_block> doms = get_dominated_by_region (CDI_DOMINATORS, region,
							n_region);

  total_count = exit->src->count;
  exit_count = exit->count ();
  /* Fix up corner cases, to avoid division by zero or creation of negative
     frequencies.  */
  if (exit_count > total_count)
    exit_count = total_count;

  copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
	    split_edge_bb_loc (exit), true);
  if (total_count.initialized_p () && exit_count.initialized_p ())
    {
      /* The original region keeps the non-exiting share of the profile,
	 the copy gets the exiting share.  */
      scale_bbs_frequencies_profile_count (region, n_region,
					   total_count - exit_count,
					   total_count);
      scale_bbs_frequencies_profile_count (region_copy, n_region, exit_count,
					   total_count);
    }

  /* Create the switch block, and put the exit condition to it.  */
  entry_bb = entry->dest;
  nentry_bb = get_bb_copy (entry_bb);
  if (!*gsi_last_bb (entry->src)
      || !stmt_ends_bb_p (*gsi_last_bb (entry->src)))
    switch_bb = entry->src;
  else
    switch_bb = split_edge (entry);
  set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);

  /* Copy the guarding condition of EXIT->src into SWITCH_BB.  */
  gcond *cond_stmt = as_a <gcond *> (*gsi_last_bb (exit->src));
  cond_stmt = as_a <gcond *> (gimple_copy (cond_stmt));

  gsi = gsi_last_bb (switch_bb);
  gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);

  /* SWITCH_BB's single successor becomes the "stay" edge; add a new edge
     to the duplicated entry with the flags/probability of the exit.  */
  sorig = single_succ_edge (switch_bb);
  sorig->flags = exits[1]->flags;
  sorig->probability = exits[1]->probability;
  snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
  snew->probability = exits[0]->probability;

  /* Register the new edge from SWITCH_BB in loop exit lists.  */
  rescan_loop_exit (snew, true, false);

  /* Add the PHI node arguments.  */
  add_phi_args_after_copy (region_copy, n_region, snew);

  /* Get rid of now superfluous conditions and associated edges (and phi node
     arguments).  */
  exit_bb = exit->dest;

  e = redirect_edge_and_branch (exits[0], exits[1]->dest);
  PENDING_STMT (e) = NULL;

  /* The latch of ORIG_LOOP was copied, and so was the backedge
     to the original header.  We redirect this backedge to EXIT_BB.  */
  for (i = 0; i < n_region; i++)
    if (get_bb_original (region_copy[i]) == orig_loop->latch)
      {
	gcc_assert (single_succ_edge (region_copy[i]));
	e = redirect_edge_and_branch (single_succ_edge (region_copy[i]),
				      exit_bb);
	PENDING_STMT (e) = NULL;
	copy_phi_arg_into_existing_phi (nexits[0], e);
      }
  e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
  PENDING_STMT (e) = NULL;

  /* Anything that is outside of the region, but was dominated by something
     inside needs to update dominance info.  */
  iterate_fix_dominators (CDI_DOMINATORS, doms, false);

  if (free_region_copy)
    free (region_copy);

  free_original_copy_tables ();
  return true;
}
7019 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
7020 adding blocks when the dominator traversal reaches EXIT. This
7021 function silently assumes that ENTRY strictly dominates EXIT. */
7023 void
7024 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
7025 vec<basic_block> *bbs_p)
7027 basic_block son;
7029 for (son = first_dom_son (CDI_DOMINATORS, entry);
7030 son;
7031 son = next_dom_son (CDI_DOMINATORS, son))
7033 bbs_p->safe_push (son);
7034 if (son != exit)
7035 gather_blocks_in_sese_region (son, exit, bbs_p);
7039 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
7040 The duplicates are recorded in VARS_MAP. */
7042 static void
7043 replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
7044 tree to_context)
7046 tree t = *tp, new_t;
7047 struct function *f = DECL_STRUCT_FUNCTION (to_context);
7049 if (DECL_CONTEXT (t) == to_context)
7050 return;
7052 bool existed;
7053 tree &loc = vars_map->get_or_insert (t, &existed);
7055 if (!existed)
7057 if (SSA_VAR_P (t))
7059 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
7060 add_local_decl (f, new_t);
7062 else
7064 gcc_assert (TREE_CODE (t) == CONST_DECL);
7065 new_t = copy_node (t);
7067 DECL_CONTEXT (new_t) = to_context;
7069 loc = new_t;
7071 else
7072 new_t = loc;
7074 *tp = new_t;
7078 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
7079 VARS_MAP maps old ssa names and var_decls to the new ones. */
static tree
replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
		  tree to_context)
{
  tree new_name;

  /* Virtual operands are rebuilt in the destination function instead
     of being remapped (see move_block_to_fn).  */
  gcc_assert (!virtual_operand_p (name));

  tree *loc = vars_map->get (name);

  if (!loc)
    {
      tree decl = SSA_NAME_VAR (name);
      if (decl)
	{
	  gcc_assert (!SSA_NAME_IS_DEFAULT_DEF (name));
	  /* Duplicate the underlying decl into TO_CONTEXT first, then
	     create the new SSA name on top of the duplicate.  */
	  replace_by_duplicate_decl (&decl, vars_map, to_context);
	  new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
				       decl, SSA_NAME_DEF_STMT (name));
	}
      else
	new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
				     name, SSA_NAME_DEF_STMT (name));

      /* Now that we've used the def stmt to define new_name, make sure it
	 doesn't define name anymore.  */
      SSA_NAME_DEF_STMT (name) = NULL;

      vars_map->put (name, new_name);
    }
  else
    new_name = *loc;

  return new_name;
}
/* Context passed to move_stmt_r / move_stmt_op while moving statements
   into another function (see move_block_to_fn).  */

struct move_stmt_d
{
  /* BLOCK the statements originally belong to; NULL_TREE means any.  */
  tree orig_block;
  /* BLOCK to set on moved expressions/statements.  */
  tree new_block;
  /* Source and destination FUNCTION_DECLs.  */
  tree from_context;
  tree to_context;
  /* Maps old decls and SSA names to their duplicates in TO_CONTEXT.  */
  hash_map<tree, tree> *vars_map;
  /* Maps old labels to replacement labels created for the destination
     function (tree_map entries), or NULL.  */
  htab_t new_label_map;
  /* Maps old EH regions to their duplicates in the destination.  */
  hash_map<void *, void *> *eh_map;
  /* Whether local decls should be replaced by their duplicates; cleared
     while walking inside OMP directive bodies.  */
  bool remap_decls_p;
};
7129 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
7130 contained in *TP if it has been ORIG_BLOCK previously and change the
7131 DECL_CONTEXT of every local variable referenced in *TP. */
static tree
move_stmt_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
  tree t = *tp;

  if (EXPR_P (t))
    {
      tree block = TREE_BLOCK (t);
      if (block == NULL_TREE)
	;
      else if (block == p->orig_block
	       || p->orig_block == NULL_TREE)
	{
	  /* tree_node_can_be_shared says we can share invariant
	     addresses but unshare_expr copies them anyways.  Make sure
	     to unshare before adjusting the block in place - we do not
	     always see a copy here.  */
	  if (TREE_CODE (t) == ADDR_EXPR
	      && is_gimple_min_invariant (t))
	    *tp = t = unshare_expr (t);
	  TREE_SET_BLOCK (t, p->new_block);
	}
      else if (flag_checking)
	{
	  /* Verify the expression's block is nested inside ORIG_BLOCK.  */
	  while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
	    block = BLOCK_SUPERCONTEXT (block);
	  gcc_assert (block == p->orig_block);
	}
    }
  else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
    {
      if (TREE_CODE (t) == SSA_NAME)
	*tp = replace_ssa_name (t, p->vars_map, p->to_context);
      else if (TREE_CODE (t) == PARM_DECL
	       && gimple_in_ssa_p (cfun))
	/* PARM_DECLs must already have a mapping recorded by the caller.  */
	*tp = *(p->vars_map->get (t));
      else if (TREE_CODE (t) == LABEL_DECL)
	{
	  if (p->new_label_map)
	    {
	      struct tree_map in, *out;
	      in.base.from = t;
	      out = (struct tree_map *)
		htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
	      if (out)
		*tp = t = out->to;
	    }

	  /* For FORCED_LABELs we can end up with references from other
	     functions if some SESE regions are outlined.  It is UB to
	     jump in between them, but they could be used just for printing
	     addresses etc.  In that case, DECL_CONTEXT on the label should
	     be the function containing the glabel stmt with that LABEL_DECL,
	     rather than whatever function a reference to the label was seen
	     last time.  */
	  if (!FORCED_LABEL (t) && !DECL_NONLOCAL (t))
	    DECL_CONTEXT (t) = p->to_context;
	}
      else if (p->remap_decls_p)
	{
	  /* Replace T with its duplicate.  T should no longer appear in the
	     parent function, so this looks wasteful; however, it may appear
	     in referenced_vars, and more importantly, as virtual operands of
	     statements, and in alias lists of other variables.  It would be
	     quite difficult to expunge it from all those places.  ??? It might
	     suffice to do this for addressable variables.  */
	  if ((VAR_P (t) && !is_global_var (t))
	      || TREE_CODE (t) == CONST_DECL)
	    replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
	}
      *walk_subtrees = 0;
    }
  else if (TYPE_P (t))
    *walk_subtrees = 0;

  return NULL_TREE;
}
7213 /* Helper for move_stmt_r. Given an EH region number for the source
7214 function, map that to the duplicate EH regio number in the dest. */
7216 static int
7217 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
7219 eh_region old_r, new_r;
7221 old_r = get_eh_region_from_number (old_nr);
7222 new_r = static_cast<eh_region> (*p->eh_map->get (old_r));
7224 return new_r->index;
7227 /* Similar, but operate on INTEGER_CSTs. */
7229 static tree
7230 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
7232 int old_nr, new_nr;
7234 old_nr = tree_to_shwi (old_t_nr);
7235 new_nr = move_stmt_eh_region_nr (old_nr, p);
7237 return build_int_cst (integer_type_node, new_nr);
7240 /* Like move_stmt_op, but for gimple statements.
7242 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
7243 contained in the current statement in *GSI_P and change the
7244 DECL_CONTEXT of every local variable referenced in the current
7245 statement. */
static tree
move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	     struct walk_stmt_info *wi)
{
  struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
  gimple *stmt = gsi_stmt (*gsi_p);
  tree block = gimple_block (stmt);

  if (block == p->orig_block
      || (p->orig_block == NULL_TREE
	  && block != NULL_TREE))
    gimple_set_block (stmt, p->new_block);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      /* Remap the region numbers for __builtin_eh_{pointer,filter}.  */
      {
	tree r, fndecl = gimple_call_fndecl (stmt);
	if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    case BUILT_IN_EH_COPY_VALUES:
	      /* Argument 1 is also a region number; remap it, then fall
		 through to handle argument 0 like the other builtins.  */
	      r = gimple_call_arg (stmt, 1);
	      r = move_stmt_eh_region_tree_nr (r, p);
	      gimple_call_set_arg (stmt, 1, r);
	      /* FALLTHRU */

	    case BUILT_IN_EH_POINTER:
	    case BUILT_IN_EH_FILTER:
	      r = gimple_call_arg (stmt, 0);
	      r = move_stmt_eh_region_tree_nr (r, p);
	      gimple_call_set_arg (stmt, 0, r);
	      break;

	    default:
	      break;
	    }
      }
      break;

    case GIMPLE_RESX:
      {
	gresx *resx_stmt = as_a <gresx *> (stmt);
	int r = gimple_resx_region (resx_stmt);
	r = move_stmt_eh_region_nr (r, p);
	gimple_resx_set_region (resx_stmt, r);
      }
      break;

    case GIMPLE_EH_DISPATCH:
      {
	geh_dispatch *eh_dispatch_stmt = as_a <geh_dispatch *> (stmt);
	int r = gimple_eh_dispatch_region (eh_dispatch_stmt);
	r = move_stmt_eh_region_nr (r, p);
	gimple_eh_dispatch_set_region (eh_dispatch_stmt, r);
      }
      break;

    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_CONTINUE:
      break;

    case GIMPLE_LABEL:
      {
	/* For FORCED_LABEL, move_stmt_op doesn't adjust DECL_CONTEXT,
	   so that such labels can be referenced from other regions.
	   Make sure to update it when seeing a GIMPLE_LABEL though,
	   that is the owner of the label.  */
	walk_gimple_op (stmt, move_stmt_op, wi);
	*handled_ops_p = true;
	tree label = gimple_label_label (as_a <glabel *> (stmt));
	if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
	  DECL_CONTEXT (label) = p->to_context;
      }
      break;

    default:
      if (is_gimple_omp (stmt))
	{
	  /* Do not remap variables inside OMP directives.  Variables
	     referenced in clauses and directive header belong to the
	     parent function and should not be moved into the child
	     function.  */
	  bool save_remap_decls_p = p->remap_decls_p;
	  p->remap_decls_p = false;
	  *handled_ops_p = true;

	  walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
			       move_stmt_op, wi);

	  p->remap_decls_p = save_remap_decls_p;
	}
      break;
    }

  return NULL_TREE;
}
7346 /* Move basic block BB from function CFUN to function DEST_FN. The
7347 block is moved out of the original linked list and placed after
7348 block AFTER in the new list. Also, the block is removed from the
7349 original array of blocks and placed in DEST_FN's array of blocks.
7350 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
7351 updated to reflect the moved edges.
7353 The local variables are remapped to new instances, VARS_MAP is used
7354 to record the mapping. */
static void
move_block_to_fn (struct function *dest_cfun, basic_block bb,
		  basic_block after, bool update_edge_count_p,
		  struct move_stmt_d *d)
{
  struct control_flow_graph *cfg;
  edge_iterator ei;
  edge e;
  gimple_stmt_iterator si;
  unsigned old_len;

  /* Remove BB from dominance structures.  */
  delete_from_dominance_info (CDI_DOMINATORS, bb);

  /* Move BB from its current loop to the copy in the new function.  */
  if (current_loops)
    {
      /* The caller stashed the destination loop in loop_father->aux
	 (see move_sese_region_to_fn).  */
      class loop *new_loop = (class loop *) bb->loop_father->aux;
      if (new_loop)
	bb->loop_father = new_loop;
    }

  /* Link BB to the new linked list.  */
  move_block_after (bb, after);

  /* Update the edge count in the corresponding flowgraphs.  */
  if (update_edge_count_p)
    FOR_EACH_EDGE (e, ei, bb->succs)
      {
	cfun->cfg->x_n_edges--;
	dest_cfun->cfg->x_n_edges++;
      }

  /* Remove BB from the original basic block array.  */
  (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
  cfun->cfg->x_n_basic_blocks--;

  /* Grow DEST_CFUN's basic block array if needed.  */
  cfg = dest_cfun->cfg;
  cfg->x_n_basic_blocks++;
  if (bb->index >= cfg->x_last_basic_block)
    cfg->x_last_basic_block = bb->index + 1;

  old_len = vec_safe_length (cfg->x_basic_block_info);
  if ((unsigned) cfg->x_last_basic_block >= old_len)
    vec_safe_grow_cleared (cfg->x_basic_block_info,
			   cfg->x_last_basic_block + 1);

  /* BB keeps its index, so the slot in DEST_CFUN matches the one just
     cleared in CFUN.  */
  (*cfg->x_basic_block_info)[bb->index] = bb;

  /* Remap the variables in phi nodes.  */
  for (gphi_iterator psi = gsi_start_phis (bb);
       !gsi_end_p (psi); )
    {
      gphi *phi = psi.phi ();
      use_operand_p use;
      tree op = PHI_RESULT (phi);
      ssa_op_iter oi;
      unsigned i;

      if (virtual_operand_p (op))
	{
	  /* Remove the phi nodes for virtual operands (alias analysis will be
	     run for the new function, anyway).  But replace all uses that
	     might be outside of the region we move.  */
	  use_operand_p use_p;
	  imm_use_iterator iter;
	  gimple *use_stmt;
	  FOR_EACH_IMM_USE_STMT (use_stmt, iter, op)
	    FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
	      SET_USE (use_p, SSA_NAME_VAR (op));
	  remove_phi_node (&psi, true);
	  /* remove_phi_node advanced PSI; don't step again.  */
	  continue;
	}

      SET_PHI_RESULT (phi,
		      replace_ssa_name (op, d->vars_map, dest_cfun->decl));
      FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
	{
	  op = USE_FROM_PTR (use);
	  if (TREE_CODE (op) == SSA_NAME)
	    SET_USE (use, replace_ssa_name (op, d->vars_map,
					    dest_cfun->decl));
	}

      /* Re-block the argument locations the same way move_stmt_op
	 re-blocks expressions.  */
      for (i = 0; i < EDGE_COUNT (bb->preds); i++)
	{
	  location_t locus = gimple_phi_arg_location (phi, i);
	  tree block = LOCATION_BLOCK (locus);

	  if (locus == UNKNOWN_LOCATION)
	    continue;
	  if (d->orig_block == NULL_TREE || block == d->orig_block)
	    {
	      locus = set_block (locus, d->new_block);
	      gimple_phi_arg_set_location (phi, i, locus);
	    }
	}

      gsi_next (&psi);
    }

  for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
    {
      gimple *stmt = gsi_stmt (si);
      struct walk_stmt_info wi;

      memset (&wi, 0, sizeof (wi));
      wi.info = d;
      walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);

      if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
	{
	  tree label = gimple_label_label (label_stmt);
	  int uid = LABEL_DECL_UID (label);

	  gcc_assert (uid > -1);

	  /* Transfer the label->block mapping to DEST_CFUN.  */
	  old_len = vec_safe_length (cfg->x_label_to_block_map);
	  if (old_len <= (unsigned) uid)
	    vec_safe_grow_cleared (cfg->x_label_to_block_map, uid + 1);

	  (*cfg->x_label_to_block_map)[uid] = bb;
	  (*cfun->cfg->x_label_to_block_map)[uid] = NULL;

	  gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);

	  if (uid >= dest_cfun->cfg->last_label_uid)
	    dest_cfun->cfg->last_label_uid = uid + 1;
	}

      maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
      remove_stmt_from_eh_lp_fn (cfun, stmt);

      gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
      gimple_remove_stmt_histograms (cfun, stmt);

      /* We cannot leave any operands allocated from the operand caches of
	 the current function.  */
      free_stmt_operands (cfun, stmt);
      push_cfun (dest_cfun);
      update_stmt (stmt);
      if (is_gimple_call (stmt))
	notice_special_calls (as_a <gcall *> (stmt));
      pop_cfun ();
    }

  /* Re-block outgoing goto locations as well.  */
  FOR_EACH_EDGE (e, ei, bb->succs)
    if (e->goto_locus != UNKNOWN_LOCATION)
      {
	tree block = LOCATION_BLOCK (e->goto_locus);
	if (d->orig_block == NULL_TREE
	    || block == d->orig_block)
	  e->goto_locus = set_block (e->goto_locus, d->new_block);
      }
}
7512 /* Examine the statements in BB (which is in SRC_CFUN); find and return
7513 the outermost EH region. Use REGION as the incoming base EH region.
7514 If there is no single outermost region, return NULL and set *ALL to
7515 true. */
7517 static eh_region
7518 find_outermost_region_in_block (struct function *src_cfun,
7519 basic_block bb, eh_region region,
7520 bool *all)
7522 gimple_stmt_iterator si;
7524 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
7526 gimple *stmt = gsi_stmt (si);
7527 eh_region stmt_region;
7528 int lp_nr;
7530 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
7531 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
7532 if (stmt_region)
7534 if (region == NULL)
7535 region = stmt_region;
7536 else if (stmt_region != region)
7538 region = eh_region_outermost (src_cfun, stmt_region, region);
7539 if (region == NULL)
7541 *all = true;
7542 return NULL;
7548 return region;
7551 static tree
7552 new_label_mapper (tree decl, void *data)
7554 htab_t hash = (htab_t) data;
7555 struct tree_map *m;
7556 void **slot;
7558 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
7560 m = XNEW (struct tree_map);
7561 m->hash = DECL_UID (decl);
7562 m->base.from = decl;
7563 m->to = create_artificial_label (UNKNOWN_LOCATION);
7564 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
7565 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
7566 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
7568 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
7569 gcc_assert (*slot == NULL);
7571 *slot = m;
7573 return m->to;
7576 /* Tree walker to replace the decls used inside value expressions by
7577 duplicates. */
7579 static tree
7580 replace_block_vars_by_duplicates_1 (tree *tp, int *walk_subtrees, void *data)
7582 struct replace_decls_d *rd = (struct replace_decls_d *)data;
7584 switch (TREE_CODE (*tp))
7586 case VAR_DECL:
7587 case PARM_DECL:
7588 case RESULT_DECL:
7589 replace_by_duplicate_decl (tp, rd->vars_map, rd->to_context);
7590 break;
7591 default:
7592 break;
7595 if (IS_TYPE_OR_DECL_P (*tp))
7596 *walk_subtrees = false;
7598 return NULL;
7601 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
7602 subblocks. */
7604 static void
7605 replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
7606 tree to_context)
7608 tree *tp, t;
7610 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
7612 t = *tp;
7613 if (!VAR_P (t) && TREE_CODE (t) != CONST_DECL)
7614 continue;
7615 replace_by_duplicate_decl (&t, vars_map, to_context);
7616 if (t != *tp)
7618 if (VAR_P (*tp) && DECL_HAS_VALUE_EXPR_P (*tp))
7620 tree x = DECL_VALUE_EXPR (*tp);
7621 struct replace_decls_d rd = { vars_map, to_context };
7622 unshare_expr (x);
7623 walk_tree (&x, replace_block_vars_by_duplicates_1, &rd, NULL);
7624 SET_DECL_VALUE_EXPR (t, x);
7625 DECL_HAS_VALUE_EXPR_P (t) = 1;
7627 DECL_CHAIN (t) = DECL_CHAIN (*tp);
7628 *tp = t;
7632 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
7633 replace_block_vars_by_duplicates (block, vars_map, to_context);
7636 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
7637 from FN1 to FN2. */
7639 static void
7640 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
7641 class loop *loop)
7643 /* Discard it from the old loop array. */
7644 (*get_loops (fn1))[loop->num] = NULL;
7646 /* Place it in the new loop array, assigning it a new number. */
7647 loop->num = number_of_loops (fn2);
7648 vec_safe_push (loops_for_fn (fn2)->larray, loop);
7650 /* Recurse to children. */
7651 for (loop = loop->inner; loop; loop = loop->next)
7652 fixup_loop_arrays_after_move (fn1, fn2, loop);
7655 /* Verify that the blocks in BBS_P are a single-entry, single-exit region
7656 delimited by ENTRY_BB and EXIT_BB, possibly containing noreturn blocks. */
7658 DEBUG_FUNCTION void
7659 verify_sese (basic_block entry, basic_block exit, vec<basic_block> *bbs_p)
7661 basic_block bb;
7662 edge_iterator ei;
7663 edge e;
7664 bitmap bbs = BITMAP_ALLOC (NULL);
7665 int i;
7667 gcc_assert (entry != NULL);
7668 gcc_assert (entry != exit);
7669 gcc_assert (bbs_p != NULL);
7671 gcc_assert (bbs_p->length () > 0);
7673 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7674 bitmap_set_bit (bbs, bb->index);
7676 gcc_assert (bitmap_bit_p (bbs, entry->index));
7677 gcc_assert (exit == NULL || bitmap_bit_p (bbs, exit->index));
7679 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7681 if (bb == entry)
7683 gcc_assert (single_pred_p (entry));
7684 gcc_assert (!bitmap_bit_p (bbs, single_pred (entry)->index));
7686 else
7687 for (ei = ei_start (bb->preds); !ei_end_p (ei); ei_next (&ei))
7689 e = ei_edge (ei);
7690 gcc_assert (bitmap_bit_p (bbs, e->src->index));
7693 if (bb == exit)
7695 gcc_assert (single_succ_p (exit));
7696 gcc_assert (!bitmap_bit_p (bbs, single_succ (exit)->index));
7698 else
7699 for (ei = ei_start (bb->succs); !ei_end_p (ei); ei_next (&ei))
7701 e = ei_edge (ei);
7702 gcc_assert (bitmap_bit_p (bbs, e->dest->index));
7706 BITMAP_FREE (bbs);
7709 /* If FROM is an SSA_NAME, mark the version in bitmap DATA. */
7711 bool
7712 gather_ssa_name_hash_map_from (tree const &from, tree const &, void *data)
7714 bitmap release_names = (bitmap)data;
7716 if (TREE_CODE (from) != SSA_NAME)
7717 return true;
7719 bitmap_set_bit (release_names, SSA_NAME_VERSION (from));
7720 return true;
7723 /* Return LOOP_DIST_ALIAS call if present in BB. */
7725 static gimple *
7726 find_loop_dist_alias (basic_block bb)
7728 gimple_stmt_iterator gsi = gsi_last_bb (bb);
7729 if (!safe_is_a <gcond *> (*gsi))
7730 return NULL;
7732 gsi_prev (&gsi);
7733 if (gsi_end_p (gsi))
7734 return NULL;
7736 gimple *g = gsi_stmt (gsi);
7737 if (gimple_call_internal_p (g, IFN_LOOP_DIST_ALIAS))
7738 return g;
7739 return NULL;
7742 /* Fold loop internal call G like IFN_LOOP_VECTORIZED/IFN_LOOP_DIST_ALIAS
7743 to VALUE and update any immediate uses of it's LHS. */
void
fold_loop_internal_call (gimple *g, tree value)
{
  tree lhs = gimple_call_lhs (g);
  use_operand_p use_p;
  imm_use_iterator iter;
  gimple *use_stmt;
  gimple_stmt_iterator gsi = gsi_for_stmt (g);

  /* Replace the call itself, then propagate VALUE into all uses of
     its former LHS.  */
  replace_call_with_value (&gsi, value);
  FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
    {
      FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
	SET_USE (use_p, value);
      update_stmt (use_stmt);
      /* If we turn conditional to constant, scale profile counts.
	 We know that the conditional was created by loop distribution
	 and all basic blocks dominated by the taken edge are part of
	 the loop distributed.  */
      if (gimple_code (use_stmt) == GIMPLE_COND)
	{
	  edge true_edge, false_edge;
	  extract_true_false_edges_from_block (gimple_bb (use_stmt),
					       &true_edge, &false_edge);
	  edge taken_edge = NULL, other_edge = NULL;
	  if (gimple_cond_true_p (as_a <gcond *> (use_stmt)))
	    {
	      taken_edge = true_edge;
	      other_edge = false_edge;
	    }
	  else if (gimple_cond_false_p (as_a <gcond *> (use_stmt)))
	    {
	      taken_edge = false_edge;
	      other_edge = true_edge;
	    }
	  /* Only rescale when the condition became constant and the
	     taken edge was not already certain.  */
	  if (taken_edge
	      && !(taken_edge->probability == profile_probability::always ()))
	    {
	      profile_count old_count = taken_edge->count ();
	      profile_count new_count = taken_edge->src->count;
	      taken_edge->probability = profile_probability::always ();
	      other_edge->probability = profile_probability::never ();
	      /* If we have multiple predecessors, we can't use the dominance
		 test.  This should not happen as the guarded code should
		 start with pre-header.  */
	      gcc_assert (single_pred_edge (taken_edge->dest));
	      if (old_count.nonzero_p ())
		{
		  taken_edge->dest->count
		    = taken_edge->dest->count.apply_scale (new_count,
							   old_count);
		  scale_strictly_dominated_blocks (taken_edge->dest,
						   new_count, old_count);
		}
	    }
	}
    }
}
7804 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
7805 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
7806 single basic block in the original CFG and the new basic block is
7807 returned. DEST_CFUN must not have a CFG yet.
7809 Note that the region need not be a pure SESE region. Blocks inside
7810 the region may contain calls to abort/exit. The only restriction
7811 is that ENTRY_BB should be the only entry point and it must
7812 dominate EXIT_BB.
7814 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
7815 functions outermost BLOCK, move all subblocks of ORIG_BLOCK
7816 to the new function.
7818 All local variables referenced in the region are assumed to be in
7819 the corresponding BLOCK_VARS and unexpanded variable lists
7820 associated with DEST_CFUN.
7822 TODO: investigate whether we can reuse gimple_duplicate_sese_region to
7823 reimplement move_sese_region_to_fn by duplicating the region rather than
7824 moving it. */
7826 basic_block
7827 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
7828 basic_block exit_bb, tree orig_block)
7830 vec<basic_block> bbs;
7831 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
7832 basic_block after, bb, *entry_pred, *exit_succ, abb;
7833 struct function *saved_cfun = cfun;
7834 int *entry_flag, *exit_flag;
7835 profile_probability *entry_prob, *exit_prob;
7836 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
7837 edge e;
7838 edge_iterator ei;
7839 htab_t new_label_map;
7840 hash_map<void *, void *> *eh_map;
7841 class loop *loop = entry_bb->loop_father;
7842 class loop *loop0 = get_loop (saved_cfun, 0);
7843 struct move_stmt_d d;
7845 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
7846 region. */
7847 gcc_assert (entry_bb != exit_bb
7848 && (!exit_bb
7849 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
7851 /* Collect all the blocks in the region. Manually add ENTRY_BB
7852 because it won't be added by dfs_enumerate_from. */
7853 bbs.create (0);
7854 bbs.safe_push (entry_bb);
7855 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
7857 if (flag_checking)
7858 verify_sese (entry_bb, exit_bb, &bbs);
7860 /* The blocks that used to be dominated by something in BBS will now be
7861 dominated by the new block. */
7862 auto_vec<basic_block> dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
7863 bbs.address (),
7864 bbs.length ());
7866 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
7867 the predecessor edges to ENTRY_BB and the successor edges to
7868 EXIT_BB so that we can re-attach them to the new basic block that
7869 will replace the region. */
7870 num_entry_edges = EDGE_COUNT (entry_bb->preds);
7871 entry_pred = XNEWVEC (basic_block, num_entry_edges);
7872 entry_flag = XNEWVEC (int, num_entry_edges);
7873 entry_prob = XNEWVEC (profile_probability, num_entry_edges);
7874 i = 0;
7875 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
7877 entry_prob[i] = e->probability;
7878 entry_flag[i] = e->flags;
7879 entry_pred[i++] = e->src;
7880 remove_edge (e);
7883 if (exit_bb)
7885 num_exit_edges = EDGE_COUNT (exit_bb->succs);
7886 exit_succ = XNEWVEC (basic_block, num_exit_edges);
7887 exit_flag = XNEWVEC (int, num_exit_edges);
7888 exit_prob = XNEWVEC (profile_probability, num_exit_edges);
7889 i = 0;
7890 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
7892 exit_prob[i] = e->probability;
7893 exit_flag[i] = e->flags;
7894 exit_succ[i++] = e->dest;
7895 remove_edge (e);
7898 else
7900 num_exit_edges = 0;
7901 exit_succ = NULL;
7902 exit_flag = NULL;
7903 exit_prob = NULL;
7906 /* Switch context to the child function to initialize DEST_FN's CFG. */
7907 gcc_assert (dest_cfun->cfg == NULL);
7908 push_cfun (dest_cfun);
7910 init_empty_tree_cfg ();
7912 /* Initialize EH information for the new function. */
7913 eh_map = NULL;
7914 new_label_map = NULL;
7915 if (saved_cfun->eh)
7917 eh_region region = NULL;
7918 bool all = false;
7920 FOR_EACH_VEC_ELT (bbs, i, bb)
7922 region = find_outermost_region_in_block (saved_cfun, bb, region, &all);
7923 if (all)
7924 break;
7927 init_eh_for_function ();
7928 if (region != NULL || all)
7930 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
7931 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
7932 new_label_mapper, new_label_map);
7936 /* Initialize an empty loop tree. */
7937 struct loops *loops = ggc_cleared_alloc<struct loops> ();
7938 init_loops_structure (dest_cfun, loops, 1);
7939 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
7940 set_loops_for_fn (dest_cfun, loops);
7942 vec<loop_p, va_gc> *larray = get_loops (saved_cfun)->copy ();
7944 /* Move the outlined loop tree part. */
7945 num_nodes = bbs.length ();
7946 FOR_EACH_VEC_ELT (bbs, i, bb)
7948 if (bb->loop_father->header == bb)
7950 class loop *this_loop = bb->loop_father;
7951 /* Avoid the need to remap SSA names used in nb_iterations. */
7952 free_numbers_of_iterations_estimates (this_loop);
7953 class loop *outer = loop_outer (this_loop);
7954 if (outer == loop
7955 /* If the SESE region contains some bbs ending with
7956 a noreturn call, those are considered to belong
7957 to the outermost loop in saved_cfun, rather than
7958 the entry_bb's loop_father. */
7959 || outer == loop0)
7961 if (outer != loop)
7962 num_nodes -= this_loop->num_nodes;
7963 flow_loop_tree_node_remove (bb->loop_father);
7964 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
7965 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
7968 else if (bb->loop_father == loop0 && loop0 != loop)
7969 num_nodes--;
7971 /* Remove loop exits from the outlined region. */
7972 if (loops_for_fn (saved_cfun)->exits)
7973 FOR_EACH_EDGE (e, ei, bb->succs)
7975 struct loops *l = loops_for_fn (saved_cfun);
7976 loop_exit **slot
7977 = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
7978 NO_INSERT);
7979 if (slot)
7980 l->exits->clear_slot (slot);
7984 /* Adjust the number of blocks in the tree root of the outlined part. */
7985 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
7987 /* Setup a mapping to be used by move_block_to_fn. */
7988 loop->aux = current_loops->tree_root;
7989 loop0->aux = current_loops->tree_root;
7991 /* Fix up orig_loop_num. If the block referenced in it has been moved
7992 to dest_cfun, update orig_loop_num field, otherwise clear it. */
7993 signed char *moved_orig_loop_num = NULL;
7994 for (auto dloop : loops_list (dest_cfun, 0))
7995 if (dloop->orig_loop_num)
7997 if (moved_orig_loop_num == NULL)
7998 moved_orig_loop_num
7999 = XCNEWVEC (signed char, vec_safe_length (larray));
8000 if ((*larray)[dloop->orig_loop_num] != NULL
8001 && get_loop (saved_cfun, dloop->orig_loop_num) == NULL)
8003 if (moved_orig_loop_num[dloop->orig_loop_num] >= 0
8004 && moved_orig_loop_num[dloop->orig_loop_num] < 2)
8005 moved_orig_loop_num[dloop->orig_loop_num]++;
8006 dloop->orig_loop_num = (*larray)[dloop->orig_loop_num]->num;
8008 else
8010 moved_orig_loop_num[dloop->orig_loop_num] = -1;
8011 dloop->orig_loop_num = 0;
8014 pop_cfun ();
8016 if (moved_orig_loop_num)
8018 FOR_EACH_VEC_ELT (bbs, i, bb)
8020 gimple *g = find_loop_dist_alias (bb);
8021 if (g == NULL)
8022 continue;
8024 int orig_loop_num = tree_to_shwi (gimple_call_arg (g, 0));
8025 gcc_assert (orig_loop_num
8026 && (unsigned) orig_loop_num < vec_safe_length (larray));
8027 if (moved_orig_loop_num[orig_loop_num] == 2)
8029 /* If we have moved both loops with this orig_loop_num into
8030 dest_cfun and the LOOP_DIST_ALIAS call is being moved there
8031 too, update the first argument. */
8032 gcc_assert ((*larray)[orig_loop_num] != NULL
8033 && (get_loop (saved_cfun, orig_loop_num) == NULL));
8034 tree t = build_int_cst (integer_type_node,
8035 (*larray)[orig_loop_num]->num);
8036 gimple_call_set_arg (g, 0, t);
8037 update_stmt (g);
8038 /* Make sure the following loop will not update it. */
8039 moved_orig_loop_num[orig_loop_num] = 0;
8041 else
8042 /* Otherwise at least one of the loops stayed in saved_cfun.
8043 Remove the LOOP_DIST_ALIAS call. */
8044 fold_loop_internal_call (g, gimple_call_arg (g, 1));
8046 FOR_EACH_BB_FN (bb, saved_cfun)
8048 gimple *g = find_loop_dist_alias (bb);
8049 if (g == NULL)
8050 continue;
8051 int orig_loop_num = tree_to_shwi (gimple_call_arg (g, 0));
8052 gcc_assert (orig_loop_num
8053 && (unsigned) orig_loop_num < vec_safe_length (larray));
8054 if (moved_orig_loop_num[orig_loop_num])
8055 /* LOOP_DIST_ALIAS call remained in saved_cfun, if at least one
8056 of the corresponding loops was moved, remove it. */
8057 fold_loop_internal_call (g, gimple_call_arg (g, 1));
8059 XDELETEVEC (moved_orig_loop_num);
8061 ggc_free (larray);
8063 /* Move blocks from BBS into DEST_CFUN. */
8064 gcc_assert (bbs.length () >= 2);
8065 after = dest_cfun->cfg->x_entry_block_ptr;
8066 hash_map<tree, tree> vars_map;
8068 memset (&d, 0, sizeof (d));
8069 d.orig_block = orig_block;
8070 d.new_block = DECL_INITIAL (dest_cfun->decl);
8071 d.from_context = cfun->decl;
8072 d.to_context = dest_cfun->decl;
8073 d.vars_map = &vars_map;
8074 d.new_label_map = new_label_map;
8075 d.eh_map = eh_map;
8076 d.remap_decls_p = true;
8078 if (gimple_in_ssa_p (cfun))
8079 for (tree arg = DECL_ARGUMENTS (d.to_context); arg; arg = DECL_CHAIN (arg))
8081 tree narg = make_ssa_name_fn (dest_cfun, arg, gimple_build_nop ());
8082 set_ssa_default_def (dest_cfun, arg, narg);
8083 vars_map.put (arg, narg);
8086 FOR_EACH_VEC_ELT (bbs, i, bb)
8088 /* No need to update edge counts on the last block. It has
8089 already been updated earlier when we detached the region from
8090 the original CFG. */
8091 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
8092 after = bb;
8095 /* Adjust the maximum clique used. */
8096 dest_cfun->last_clique = saved_cfun->last_clique;
8098 loop->aux = NULL;
8099 loop0->aux = NULL;
8100 /* Loop sizes are no longer correct, fix them up. */
8101 loop->num_nodes -= num_nodes;
8102 for (class loop *outer = loop_outer (loop);
8103 outer; outer = loop_outer (outer))
8104 outer->num_nodes -= num_nodes;
8105 loop0->num_nodes -= bbs.length () - num_nodes;
8107 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
8109 class loop *aloop;
8110 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
8111 if (aloop != NULL)
8113 if (aloop->simduid)
8115 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
8116 d.to_context);
8117 dest_cfun->has_simduid_loops = true;
8119 if (aloop->force_vectorize)
8120 dest_cfun->has_force_vectorize_loops = true;
8124 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
8125 if (orig_block)
8127 tree block;
8128 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
8129 == NULL_TREE);
8130 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
8131 = BLOCK_SUBBLOCKS (orig_block);
8132 for (block = BLOCK_SUBBLOCKS (orig_block);
8133 block; block = BLOCK_CHAIN (block))
8134 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
8135 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
8138 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
8139 &vars_map, dest_cfun->decl);
8141 if (new_label_map)
8142 htab_delete (new_label_map);
8143 if (eh_map)
8144 delete eh_map;
8146 /* We need to release ssa-names in a defined order, so first find them,
8147 and then iterate in ascending version order. */
8148 bitmap release_names = BITMAP_ALLOC (NULL);
8149 vars_map.traverse<void *, gather_ssa_name_hash_map_from> (release_names);
8150 bitmap_iterator bi;
8151 EXECUTE_IF_SET_IN_BITMAP (release_names, 0, i, bi)
8152 release_ssa_name (ssa_name (i));
8153 BITMAP_FREE (release_names);
8155 /* Rewire the entry and exit blocks. The successor to the entry
8156 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
8157 the child function. Similarly, the predecessor of DEST_FN's
8158 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
8159 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
8160 various CFG manipulation function get to the right CFG.
8162 FIXME, this is silly. The CFG ought to become a parameter to
8163 these helpers. */
8164 push_cfun (dest_cfun);
8165 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = entry_bb->count;
8166 make_single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
8167 if (exit_bb)
8169 make_single_succ_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
8170 EXIT_BLOCK_PTR_FOR_FN (cfun)->count = exit_bb->count;
8172 else
8173 EXIT_BLOCK_PTR_FOR_FN (cfun)->count = profile_count::zero ();
8174 pop_cfun ();
8176 /* Back in the original function, the SESE region has disappeared,
8177 create a new basic block in its place. */
8178 bb = create_empty_bb (entry_pred[0]);
8179 if (current_loops)
8180 add_bb_to_loop (bb, loop);
8181 profile_count count = profile_count::zero ();
8182 for (i = 0; i < num_entry_edges; i++)
8184 e = make_edge (entry_pred[i], bb, entry_flag[i]);
8185 e->probability = entry_prob[i];
8186 count += e->count ();
8188 bb->count = count;
8190 for (i = 0; i < num_exit_edges; i++)
8192 e = make_edge (bb, exit_succ[i], exit_flag[i]);
8193 e->probability = exit_prob[i];
8196 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
8197 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
8198 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
8200 if (exit_bb)
8202 free (exit_prob);
8203 free (exit_flag);
8204 free (exit_succ);
8206 free (entry_prob);
8207 free (entry_flag);
8208 free (entry_pred);
8209 bbs.release ();
8211 return bb;
8214 /* Dump default def DEF to file FILE using FLAGS and indentation
8215 SPC. */
8217 static void
8218 dump_default_def (FILE *file, tree def, int spc, dump_flags_t flags)
8220 for (int i = 0; i < spc; ++i)
8221 fprintf (file, " ");
8222 dump_ssaname_info_to_file (file, def, spc);
8224 print_generic_expr (file, TREE_TYPE (def), flags);
8225 fprintf (file, " ");
8226 print_generic_expr (file, def, flags);
8227 fprintf (file, " = ");
8228 print_generic_expr (file, SSA_NAME_VAR (def), flags);
8229 fprintf (file, ";\n");
8232 /* Print no_sanitize attribute to FILE for a given attribute VALUE. */
8234 static void
8235 print_no_sanitize_attr_value (FILE *file, tree value)
8237 unsigned int flags = tree_to_uhwi (value);
8238 bool first = true;
8239 for (int i = 0; sanitizer_opts[i].name != NULL; ++i)
8241 if ((sanitizer_opts[i].flag & flags) == sanitizer_opts[i].flag)
8243 if (!first)
8244 fprintf (file, " | ");
8245 fprintf (file, "%s", sanitizer_opts[i].name);
8246 first = false;
/* Dump FUNCTION_DECL FNDECL to file FILE using FLAGS (see TDF_* in
   dumpfile.h).  Depending on how far the function has been lowered this
   emits an attribute header, the signature, and then either a CFG-based
   dump, a flat GIMPLE sequence, or a GENERIC tree dump.  */

void
dump_function_to_file (tree fndecl, FILE *file, dump_flags_t flags)
{
  tree arg, var, old_current_fndecl = current_function_decl;
  struct function *dsf;
  bool ignore_topmost_bind = false, any_var = false;
  basic_block bb;
  tree chain;
  bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
		  && decl_is_tm_clone (fndecl));
  struct function *fun = DECL_STRUCT_FUNCTION (fndecl);

  tree fntype = TREE_TYPE (fndecl);
  /* Dump both decl attributes and type attributes, in that order.  */
  tree attrs[] = { DECL_ATTRIBUTES (fndecl), TYPE_ATTRIBUTES (fntype) };

  for (int i = 0; i != 2; ++i)
    {
      if (!attrs[i])
	continue;

      fprintf (file, "__attribute__((");

      bool first = true;
      tree chain;
      for (chain = attrs[i]; chain; first = false, chain = TREE_CHAIN (chain))
	{
	  if (!first)
	    fprintf (file, ", ");

	  tree name = get_attribute_name (chain);
	  print_generic_expr (file, name, dump_flags);
	  if (TREE_VALUE (chain) != NULL_TREE)
	    {
	      fprintf (file, " (");

	      /* A few attributes need special decoding of their value.  */
	      if (strstr (IDENTIFIER_POINTER (name), "no_sanitize"))
		print_no_sanitize_attr_value (file, TREE_VALUE (chain));
	      else if (!strcmp (IDENTIFIER_POINTER (name),
				"omp declare variant base"))
		{
		  tree a = TREE_VALUE (chain);
		  print_generic_expr (file, TREE_PURPOSE (a), dump_flags);
		  fprintf (file, " match ");
		  print_omp_context_selector (file, TREE_VALUE (a),
					      dump_flags);
		}
	      else
		print_generic_expr (file, TREE_VALUE (chain), dump_flags);
	      fprintf (file, ")");
	    }
	}

      fprintf (file, "))\n");
    }

  current_function_decl = fndecl;
  if (flags & TDF_GIMPLE)
    {
      /* Emit the hot-bb-threshold param comment only once per process so
	 a GIMPLE FE dump of several functions stays parseable.  */
      static bool hotness_bb_param_printed = false;
      if (profile_info != NULL
	  && !hotness_bb_param_printed)
	{
	  hotness_bb_param_printed = true;
	  fprintf (file,
		   "/* --param=gimple-fe-computed-hot-bb-threshold=%" PRId64
		   " */\n", get_hot_bb_threshold ());
	}

      print_generic_expr (file, TREE_TYPE (TREE_TYPE (fndecl)),
			  dump_flags | TDF_SLIM);
      fprintf (file, " __GIMPLE (%s",
	       (fun->curr_properties & PROP_ssa) ? "ssa"
	       : (fun->curr_properties & PROP_cfg) ? "cfg"
	       : "");

      if (fun && fun->cfg)
	{
	  basic_block bb = ENTRY_BLOCK_PTR_FOR_FN (fun);
	  if (bb->count.initialized_p ())
	    fprintf (file, ",%s(%" PRIu64 ")",
		     profile_quality_as_string (bb->count.quality ()),
		     bb->count.value ());
	  if (dump_flags & TDF_UID)
	    fprintf (file, ")\n%sD_%u (", function_name (fun),
		     DECL_UID (fndecl));
	  else
	    fprintf (file, ")\n%s (", function_name (fun));
	}
    }
  else
    {
      print_generic_expr (file, TREE_TYPE (fntype), dump_flags);
      if (dump_flags & TDF_UID)
	fprintf (file, " %sD.%u %s(", function_name (fun), DECL_UID (fndecl),
		 tmclone ? "[tm-clone] " : "");
      else
	fprintf (file, " %s %s(", function_name (fun),
		 tmclone ? "[tm-clone] " : "");
    }

  /* Dump the comma-separated parameter list.  */
  arg = DECL_ARGUMENTS (fndecl);
  while (arg)
    {
      print_generic_expr (file, TREE_TYPE (arg), dump_flags);
      fprintf (file, " ");
      print_generic_expr (file, arg, dump_flags);
      if (DECL_CHAIN (arg))
	fprintf (file, ", ");
      arg = DECL_CHAIN (arg);
    }
  fprintf (file, ")\n");

  dsf = DECL_STRUCT_FUNCTION (fndecl);
  if (dsf && (flags & TDF_EH))
    dump_eh_tree (file, dsf);

  if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
    {
      dump_node (fndecl, TDF_SLIM | flags, file);
      current_function_decl = old_current_fndecl;
      return;
    }

  /* When GIMPLE is lowered, the variables are no longer available in
     BIND_EXPRs, so display them separately.  */
  if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
    {
      unsigned ix;
      ignore_topmost_bind = true;

      fprintf (file, "{\n");
      if (gimple_in_ssa_p (fun)
	  && (flags & TDF_ALIAS))
	{
	  for (arg = DECL_ARGUMENTS (fndecl); arg != NULL;
	       arg = DECL_CHAIN (arg))
	    {
	      tree def = ssa_default_def (fun, arg);
	      if (def)
		dump_default_def (file, def, 2, flags);
	    }

	  tree res = DECL_RESULT (fun->decl);
	  if (res != NULL_TREE
	      && DECL_BY_REFERENCE (res))
	    {
	      tree def = ssa_default_def (fun, res);
	      if (def)
		dump_default_def (file, def, 2, flags);
	    }

	  tree static_chain = fun->static_chain_decl;
	  if (static_chain != NULL_TREE)
	    {
	      tree def = ssa_default_def (fun, static_chain);
	      if (def)
		dump_default_def (file, def, 2, flags);
	    }
	}

      if (!vec_safe_is_empty (fun->local_decls))
	FOR_EACH_LOCAL_DECL (fun, ix, var)
	  {
	    print_generic_decl (file, var, flags);
	    fprintf (file, "\n");

	    any_var = true;
	  }

      tree name;

      if (gimple_in_ssa_p (fun))
	FOR_EACH_SSA_NAME (ix, name, fun)
	  {
	    if (!SSA_NAME_VAR (name)
		/* SSA name with decls without a name still get
		   dumped as _N, list those explicitely as well even
		   though we've dumped the decl declaration as D.xxx
		   above.  */
		|| !SSA_NAME_IDENTIFIER (name))
	      {
		fprintf (file, "  ");
		print_generic_expr (file, TREE_TYPE (name), flags);
		fprintf (file, " ");
		print_generic_expr (file, name, flags);
		fprintf (file, ";\n");

		any_var = true;
	      }
	  }
    }

  if (fun && fun->decl == fndecl
      && fun->cfg
      && basic_block_info_for_fn (fun))
    {
      /* If the CFG has been built, emit a CFG-based dump.  */
      if (!ignore_topmost_bind)
	fprintf (file, "{\n");

      if (any_var && n_basic_blocks_for_fn (fun))
	fprintf (file, "\n");

      FOR_EACH_BB_FN (bb, fun)
	dump_bb (file, bb, 2, flags);

      fprintf (file, "}\n");
    }
  else if (fun && (fun->curr_properties & PROP_gimple_any))
    {
      /* The function is now in GIMPLE form but the CFG has not been
	 built yet.  Emit the single sequence of GIMPLE statements
	 that make up its body.  */
      gimple_seq body = gimple_body (fndecl);

      if (gimple_seq_first_stmt (body)
	  && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
	  && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
	print_gimple_seq (file, body, 0, flags);
      else
	{
	  if (!ignore_topmost_bind)
	    fprintf (file, "{\n");

	  if (any_var)
	    fprintf (file, "\n");

	  print_gimple_seq (file, body, 2, flags);
	  fprintf (file, "}\n");
	}
    }
  else
    {
      int indent;

      /* Make a tree based dump.  */
      chain = DECL_SAVED_TREE (fndecl);
      if (chain && TREE_CODE (chain) == BIND_EXPR)
	{
	  if (ignore_topmost_bind)
	    {
	      chain = BIND_EXPR_BODY (chain);
	      indent = 2;
	    }
	  else
	    indent = 0;
	}
      else
	{
	  if (!ignore_topmost_bind)
	    {
	      fprintf (file, "{\n");
	      /* No topmost bind, pretend it's ignored for later.  */
	      ignore_topmost_bind = true;
	    }
	  indent = 2;
	}

      if (any_var)
	fprintf (file, "\n");

      print_generic_stmt_indented (file, chain, flags, indent);
      if (ignore_topmost_bind)
	fprintf (file, "}\n");
    }

  if (flags & TDF_ENUMERATE_LOCALS)
    dump_enumerated_decls (file, flags);
  fprintf (file, "\n\n");

  /* Restore the caller's notion of the current function.  */
  current_function_decl = old_current_fndecl;
}
8527 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h) */
8529 DEBUG_FUNCTION void
8530 debug_function (tree fn, dump_flags_t flags)
8532 dump_function_to_file (fn, stderr, flags);
8536 /* Print on FILE the indexes for the predecessors of basic_block BB. */
8538 static void
8539 print_pred_bbs (FILE *file, basic_block bb)
8541 edge e;
8542 edge_iterator ei;
8544 FOR_EACH_EDGE (e, ei, bb->preds)
8545 fprintf (file, "bb_%d ", e->src->index);
8549 /* Print on FILE the indexes for the successors of basic_block BB. */
8551 static void
8552 print_succ_bbs (FILE *file, basic_block bb)
8554 edge e;
8555 edge_iterator ei;
8557 FOR_EACH_EDGE (e, ei, bb->succs)
8558 fprintf (file, "bb_%d ", e->dest->index);
8561 /* Print to FILE the basic block BB following the VERBOSITY level. */
8563 void
8564 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
8566 char *s_indent = (char *) alloca ((size_t) indent + 1);
8567 memset ((void *) s_indent, ' ', (size_t) indent);
8568 s_indent[indent] = '\0';
8570 /* Print basic_block's header. */
8571 if (verbosity >= 2)
8573 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
8574 print_pred_bbs (file, bb);
8575 fprintf (file, "}, succs = {");
8576 print_succ_bbs (file, bb);
8577 fprintf (file, "})\n");
8580 /* Print basic_block's body. */
8581 if (verbosity >= 3)
8583 fprintf (file, "%s {\n", s_indent);
8584 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
8585 fprintf (file, "%s }\n", s_indent);
/* Print to FILE the flags and iteration-count information recorded on
   LOOP.  Each multi-line item is prefixed with PREFIX so the output
   aligns with the caller's indentation.  */

void
print_loop_info (FILE *file, const class loop *loop, const char *prefix)
{
  /* Boolean flags are appended inline, comma separated.  */
  if (loop->can_be_parallel)
    fprintf (file, ", can_be_parallel");
  if (loop->warned_aggressive_loop_optimizations)
    fprintf (file, ", warned_aggressive_loop_optimizations");
  if (loop->dont_vectorize)
    fprintf (file, ", dont_vectorize");
  if (loop->force_vectorize)
    fprintf (file, ", force_vectorize");
  if (loop->in_oacc_kernels_region)
    fprintf (file, ", in_oacc_kernels_region");
  if (loop->finite_p)
    fprintf (file, ", finite_p");
  if (loop->unroll)
    fprintf (file, "\n%sunroll %d", prefix, loop->unroll);
  if (loop->nb_iterations)
    {
      fprintf (file, "\n%sniter ", prefix);
      print_generic_expr (file, loop->nb_iterations);
    }

  if (loop->any_upper_bound)
    {
      fprintf (file, "\n%supper_bound ", prefix);
      print_decu (loop->nb_iterations_upper_bound, file);
    }
  if (loop->any_likely_upper_bound)
    {
      fprintf (file, "\n%slikely_upper_bound ", prefix);
      print_decu (loop->nb_iterations_likely_upper_bound, file);
    }

  if (loop->any_estimate)
    {
      fprintf (file, "\n%sestimate ", prefix);
      print_decu (loop->nb_iterations_estimate, file);
    }
  bool reliable;
  sreal iterations;
  /* loop->num == 0 is the function's root "loop"; profile-based
     iteration estimates only make sense for real loops.  */
  if (loop->num && expected_loop_iterations_by_profile (loop, &iterations, &reliable))
    {
      fprintf (file, "\n%siterations by profile: %f (%s%s) entry count:", prefix,
	       iterations.to_double (), reliable ? "reliable" : "unreliable",
	       maybe_flat_loop_profile (loop) ? ", maybe flat" : "");
      loop_count_in (loop).dump (file, cfun);
    }
}
8642 static void print_loop_and_siblings (FILE *, class loop *, int, int);
8644 /* Pretty print LOOP on FILE, indented INDENT spaces. Following
8645 VERBOSITY level this outputs the contents of the loop, or just its
8646 structure. */
8648 static void
8649 print_loop (FILE *file, class loop *loop, int indent, int verbosity)
8651 char *s_indent;
8652 basic_block bb;
8654 if (loop == NULL)
8655 return;
8657 s_indent = (char *) alloca ((size_t) indent + 1);
8658 memset ((void *) s_indent, ' ', (size_t) indent);
8659 s_indent[indent] = '\0';
8661 /* Print loop's header. */
8662 fprintf (file, "%sloop_%d (", s_indent, loop->num);
8663 if (loop->header)
8664 fprintf (file, "header = %d", loop->header->index);
8665 else
8667 fprintf (file, "deleted)\n");
8668 return;
8670 if (loop->latch)
8671 fprintf (file, ", latch = %d", loop->latch->index);
8672 else
8673 fprintf (file, ", multiple latches");
8674 print_loop_info (file, loop, s_indent);
8675 fprintf (file, ")\n");
8677 /* Print loop's body. */
8678 if (verbosity >= 1)
8680 fprintf (file, "%s{\n", s_indent);
8681 FOR_EACH_BB_FN (bb, cfun)
8682 if (bb->loop_father == loop)
8683 print_loops_bb (file, bb, indent, verbosity);
8685 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
8686 fprintf (file, "%s}\n", s_indent);
8690 /* Print the LOOP and its sibling loops on FILE, indented INDENT
8691 spaces. Following VERBOSITY level this outputs the contents of the
8692 loop, or just its structure. */
8694 static void
8695 print_loop_and_siblings (FILE *file, class loop *loop, int indent,
8696 int verbosity)
8698 if (loop == NULL)
8699 return;
8701 print_loop (file, loop, indent, verbosity);
8702 print_loop_and_siblings (file, loop->next, indent, verbosity);
8705 /* Follow a CFG edge from the entry point of the program, and on entry
8706 of a loop, pretty print the loop structure on FILE. */
8708 void
8709 print_loops (FILE *file, int verbosity)
8711 basic_block bb;
8713 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
8714 fprintf (file, "\nLoops in function: %s\n", current_function_name ());
8715 if (bb && bb->loop_father)
8716 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
8719 /* Dump a loop. */
8721 DEBUG_FUNCTION void
8722 debug (class loop &ref)
8724 print_loop (stderr, &ref, 0, /*verbosity*/0);
8727 DEBUG_FUNCTION void
8728 debug (class loop *ptr)
8730 if (ptr)
8731 debug (*ptr);
8732 else
8733 fprintf (stderr, "<nil>\n");
8736 /* Dump a loop verbosely. */
8738 DEBUG_FUNCTION void
8739 debug_verbose (class loop &ref)
8741 print_loop (stderr, &ref, 0, /*verbosity*/3);
8744 DEBUG_FUNCTION void
8745 debug_verbose (class loop *ptr)
8747 if (ptr)
8748 debug (*ptr);
8749 else
8750 fprintf (stderr, "<nil>\n");
8754 /* Debugging loops structure at tree level, at some VERBOSITY level. */
8756 DEBUG_FUNCTION void
8757 debug_loops (int verbosity)
8759 print_loops (stderr, verbosity);
8762 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
8764 DEBUG_FUNCTION void
8765 debug_loop (class loop *loop, int verbosity)
8767 print_loop (stderr, loop, 0, verbosity);
8770 /* Print on stderr the code of loop number NUM, at some VERBOSITY
8771 level. */
8773 DEBUG_FUNCTION void
8774 debug_loop_num (unsigned num, int verbosity)
8776 debug_loop (get_loop (cfun, num), verbosity);
8779 /* Return true if BB ends with a call, possibly followed by some
8780 instructions that must stay with the call. Return false,
8781 otherwise. */
8783 static bool
8784 gimple_block_ends_with_call_p (basic_block bb)
8786 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
8787 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
8791 /* Return true if BB ends with a conditional branch. Return false,
8792 otherwise. */
8794 static bool
8795 gimple_block_ends_with_condjump_p (const_basic_block bb)
8797 return safe_is_a <gcond *> (*gsi_last_bb (const_cast <basic_block> (bb)));
/* Return true if statement T may terminate execution of BB in ways not
   explicitly represtented in the CFG.  */

bool
stmt_can_terminate_bb_p (gimple *t)
{
  tree fndecl = NULL_TREE;
  int call_flags = 0;

  /* Eh exception not handled internally terminates execution of the whole
     function.  */
  if (stmt_can_throw_external (cfun, t))
    return true;

  /* NORETURN and LONGJMP calls already have an edge to exit.
     CONST and PURE calls do not need one.
     We don't currently check for CONST and PURE here, although
     it would be a good idea, because those attributes are
     figured out from the RTL in mark_constant_function, and
     the counter incrementation code from -fprofile-arcs
     leads to different results from -fbranch-probabilities.  */
  if (is_gimple_call (t))
    {
      fndecl = gimple_call_fndecl (t);
      call_flags = gimple_call_flags (t);
    }

  /* Known builtins that cannot throw and do not return twice are the
     one class of calls guaranteed not to terminate the block.  */
  if (is_gimple_call (t)
      && fndecl
      && fndecl_built_in_p (fndecl)
      && (call_flags & ECF_NOTHROW)
      && !(call_flags & ECF_RETURNS_TWICE)
      /* fork() doesn't really return twice, but the effect of
	 wrapping it in __gcov_fork() which calls __gcov_dump() and
	 __gcov_reset() and clears the counters before forking has the same
	 effect as returning twice.  Force a fake edge.  */
      && !fndecl_built_in_p (fndecl, BUILT_IN_FORK))
    return false;

  if (is_gimple_call (t))
    {
      edge_iterator ei;
      edge e;
      basic_block bb;

      if (call_flags & (ECF_PURE | ECF_CONST)
	  && !(call_flags & ECF_LOOPING_CONST_OR_PURE))
	return false;

      /* Function call may do longjmp, terminate program or do other things.
	 Special case noreturn that have non-abnormal edges out as in this case
	 the fact is sufficiently represented by lack of edges out of T.  */
      if (!(call_flags & ECF_NORETURN))
	return true;

      /* A noreturn call with a real (non-fake) outgoing edge still has
	 its termination represented in the CFG.  */
      bb = gimple_bb (t);
      FOR_EACH_EDGE (e, ei, bb->succs)
	if ((e->flags & EDGE_FAKE) == 0)
	  return true;
    }

  /* Volatile or input-taking asms may transfer control in ways the CFG
     does not record.  */
  if (gasm *asm_stmt = dyn_cast <gasm *> (t))
    if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_input_p (asm_stmt))
      return true;

  return false;
}
/* Add fake edges to the function exit for any non constant and non
   noreturn calls (or noreturn calls with EH/abnormal edges),
   volatile inline assembly in the bitmap of blocks specified by BLOCKS
   or to the whole CFG if BLOCKS is zero.  Return the number of blocks
   that were split.

   The goal is to expose cases in which entering a basic block does
   not imply that all subsequent instructions must be executed.  */

static int
gimple_flow_call_edges_add (sbitmap blocks)
{
  int i;
  int blocks_split = 0;
  int last_bb = last_basic_block_for_fn (cfun);
  bool check_last_block = false;

  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    return 0;

  if (! blocks)
    check_last_block = true;
  else
    check_last_block = bitmap_bit_p (blocks,
				     EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);

  /* In the last basic block, before epilogue generation, there will be
     a fallthru edge to EXIT.  Special care is required if the last insn
     of the last basic block is a call because make_edge folds duplicate
     edges, which would result in the fallthru edge also being marked
     fake, which would result in the fallthru edge being removed by
     remove_fake_edges, which would result in an invalid CFG.

     Moreover, we can't elide the outgoing fake edge, since the block
     profiler needs to take this into account in order to solve the minimal
     spanning tree in the case that the call doesn't return.

     Handle this by adding a dummy instruction in a new last basic block.  */
  if (check_last_block)
    {
      basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
      gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
      gimple *t = NULL;

      if (!gsi_end_p (gsi))
	t = gsi_stmt (gsi);

      if (t && stmt_can_terminate_bb_p (t))
	{
	  edge e;

	  e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
	  if (e)
	    {
	      /* The nop forces a new block, so the exit edge no longer
		 leaves a block ending in a call.  */
	      gsi_insert_on_edge (e, gimple_build_nop ());
	      gsi_commit_edge_inserts ();
	    }
	}
    }

  /* Now add fake edges to the function exit for any non constant
     calls since there is no way that we can determine if they will
     return or not...  */
  for (i = 0; i < last_bb; i++)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      gimple_stmt_iterator gsi;
      gimple *stmt, *last_stmt;

      if (!bb)
	continue;

      if (blocks && !bitmap_bit_p (blocks, i))
	continue;

      /* Walk statements backwards so splitting the block does not
	 disturb the part already processed.  */
      gsi = gsi_last_nondebug_bb (bb);
      if (!gsi_end_p (gsi))
	{
	  last_stmt = gsi_stmt (gsi);
	  do
	    {
	      stmt = gsi_stmt (gsi);
	      if (stmt_can_terminate_bb_p (stmt))
		{
		  edge e;

		  /* The handling above of the final block before the
		     epilogue should be enough to verify that there is
		     no edge to the exit block in CFG already.
		     Calling make_edge in such case would cause us to
		     mark that edge as fake and remove it later.  */
		  if (flag_checking && stmt == last_stmt)
		    {
		      e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
		      gcc_assert (e == NULL);
		    }

		  /* Note that the following may create a new basic block
		     and renumber the existing basic blocks.  */
		  if (stmt != last_stmt)
		    {
		      e = split_block (bb, stmt);
		      if (e)
			blocks_split++;
		    }
		  e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
		  e->probability = profile_probability::guessed_never ();
		}
	      gsi_prev (&gsi);
	    }
	  while (!gsi_end_p (gsi));
	}
    }

  if (blocks_split)
    checking_verify_flow_info ();

  return blocks_split;
}
/* Removes edge E and all the blocks dominated by it, and updates dominance
   information.  The IL in E->src needs to be updated separately.
   If dominance info is not available, only the edge E is removed.  */

void
remove_edge_and_dominated_blocks (edge e)
{
  vec<basic_block> bbs_to_fix_dom = vNULL;
  edge f;
  edge_iterator ei;
  bool none_removed = false;
  unsigned i;
  basic_block bb, dbb;
  bitmap_iterator bi;

  /* If we are removing a path inside a non-root loop that may change
     loop ownership of blocks or remove loops.  Mark loops for fixup.  */
  if (current_loops
      && loop_outer (e->src->loop_father) != NULL
      && e->src->loop_father == e->dest->loop_father)
    loops_state_set (LOOPS_NEED_FIXUP);

  if (!dom_info_available_p (CDI_DOMINATORS))
    {
      remove_edge (e);
      return;
    }

  /* No updating is needed for edges to exit.  */
  if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      if (cfgcleanup_altered_bbs)
	bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
      remove_edge (e);
      return;
    }

  /* First, we find the basic blocks to remove.  If E->dest has a predecessor
     that is not dominated by E->dest, then this set is empty.  Otherwise,
     all the basic blocks dominated by E->dest are removed.

     Also, to DF_IDOM we store the immediate dominators of the blocks in
     the dominance frontier of E (i.e., of the successors of the
     removed blocks, if there are any, and of E->dest otherwise).  */
  FOR_EACH_EDGE (f, ei, e->dest->preds)
    {
      if (f == e)
	continue;

      if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
	{
	  none_removed = true;
	  break;
	}
    }

  auto_bitmap df, df_idom;
  auto_vec<basic_block> bbs_to_remove;
  if (none_removed)
    bitmap_set_bit (df_idom,
		    get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
  else
    {
      bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
      /* DF collects successors of the removed blocks that survive.  */
      FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
	{
	  FOR_EACH_EDGE (f, ei, bb->succs)
	    {
	      if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
		bitmap_set_bit (df, f->dest->index);
	    }
	}
      FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
	bitmap_clear_bit (df, bb->index);

      EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
	{
	  bb = BASIC_BLOCK_FOR_FN (cfun, i);
	  bitmap_set_bit (df_idom,
			  get_immediate_dominator (CDI_DOMINATORS, bb)->index);
	}
    }

  if (cfgcleanup_altered_bbs)
    {
      /* Record the set of the altered basic blocks.  */
      bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
      bitmap_ior_into (cfgcleanup_altered_bbs, df);
    }

  /* Remove E and the cancelled blocks.  */
  if (none_removed)
    remove_edge (e);
  else
    {
      /* Walk backwards so as to get a chance to substitute all
	 released DEFs into debug stmts.  See
	 eliminate_unnecessary_stmts() in tree-ssa-dce.cc for more
	 details.  */
      for (i = bbs_to_remove.length (); i-- > 0; )
	delete_basic_block (bbs_to_remove[i]);
    }

  /* Update the dominance information.  The immediate dominator may change only
     for blocks whose immediate dominator belongs to DF_IDOM:

     Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
     removal.  Let Z the arbitrary block such that idom(Z) = Y and
     Z dominates X after the removal.  Before removal, there exists a path P
     from Y to X that avoids Z.  Let F be the last edge on P that is
     removed, and let W = F->dest.  Before removal, idom(W) = Y (since Y
     dominates W, and because of P, Z does not dominate W), and W belongs to
     the dominance frontier of E.  Therefore, Y belongs to DF_IDOM.  */
  EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
    {
      bb = BASIC_BLOCK_FOR_FN (cfun, i);
      for (dbb = first_dom_son (CDI_DOMINATORS, bb);
	   dbb;
	   dbb = next_dom_son (CDI_DOMINATORS, dbb))
	bbs_to_fix_dom.safe_push (dbb);
    }

  iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);

  bbs_to_fix_dom.release ();
}
9117 /* Purge dead EH edges from basic block BB. */
9119 bool
9120 gimple_purge_dead_eh_edges (basic_block bb)
9122 bool changed = false;
9123 edge e;
9124 edge_iterator ei;
9125 gimple *stmt = *gsi_last_bb (bb);
9127 if (stmt && stmt_can_throw_internal (cfun, stmt))
9128 return false;
9130 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
9132 if (e->flags & EDGE_EH)
9134 remove_edge_and_dominated_blocks (e);
9135 changed = true;
9137 else
9138 ei_next (&ei);
9141 return changed;
9144 /* Purge dead EH edges from basic block listed in BLOCKS. */
9146 bool
9147 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
9149 bool changed = false;
9150 unsigned i;
9151 bitmap_iterator bi;
9153 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
9155 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
9157 /* Earlier gimple_purge_dead_eh_edges could have removed
9158 this basic block already. */
9159 gcc_assert (bb || changed);
9160 if (bb != NULL)
9161 changed |= gimple_purge_dead_eh_edges (bb);
9164 return changed;
/* Purge dead abnormal call edges from basic block BB.  Returns true if
   any edge was removed or had its EDGE_ABNORMAL flag cleared.  */

bool
gimple_purge_dead_abnormal_call_edges (basic_block bb)
{
  bool changed = false;
  edge e;
  edge_iterator ei;
  gimple *stmt = *gsi_last_bb (bb);

  /* If the last statement can still perform an abnormal goto, the
     abnormal edges are live.  */
  if (stmt && stmt_can_make_abnormal_goto (stmt))
    return false;

  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      if (e->flags & EDGE_ABNORMAL)
	{
	  /* A fallthru edge is kept, only demoted to a normal edge;
	     the iterator is not advanced here, so the now-normal edge
	     is re-inspected and skipped on the next iteration.  */
	  if (e->flags & EDGE_FALLTHRU)
	    e->flags &= ~EDGE_ABNORMAL;
	  else
	    remove_edge_and_dominated_blocks (e);
	  changed = true;
	}
      else
	ei_next (&ei);
    }

  return changed;
}
/* Purge dead abnormal call edges from every basic block whose index is
   set in BLOCKS.  Returns true if anything changed.  */

bool
gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
{
  bool changed = false;
  unsigned i;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);

      /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
	 this basic block already.  */
      gcc_assert (bb || changed);
      if (bb != NULL)
	changed |= gimple_purge_dead_abnormal_call_edges (bb);
    }

  return changed;
}
9220 /* This function is called whenever a new edge is created or
9221 redirected. */
9223 static void
9224 gimple_execute_on_growing_pred (edge e)
9226 basic_block bb = e->dest;
9228 if (!gimple_seq_empty_p (phi_nodes (bb)))
9229 reserve_phi_args_for_new_edge (bb);
9232 /* This function is called immediately before edge E is removed from
9233 the edge vector E->dest->preds. */
9235 static void
9236 gimple_execute_on_shrinking_pred (edge e)
9238 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
9239 remove_phi_args (e);
/*---------------------------------------------------------------------------
  Helper functions for Loop versioning
  ---------------------------------------------------------------------------*/

/* Adjust phi nodes for 'first' basic block.  'second' basic block is a copy
   of 'first'.  Both of them are dominated by 'new_head' basic block.  When
   'new_head' was created by 'second's incoming edge it received phi arguments
   on the edge by split_edge().  Later, additional edge 'e' was created to
   connect 'new_head' and 'first'.  Now this routine adds phi args on this
   additional edge 'e' that new_head to second edge received as part of edge
   splitting.  */

static void
gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
				  basic_block new_head, edge e)
{
  gphi *phi1, *phi2;
  gphi_iterator psi1, psi2;
  tree def;
  edge e2 = find_edge (new_head, second);

  /* Because NEW_HEAD has been created by splitting SECOND's incoming
     edge, we should always have an edge from NEW_HEAD to SECOND.  */
  gcc_assert (e2 != NULL);

  /* Browse all 'second' basic block phi nodes and add phi args to
     edge 'e' for 'first' head.  PHI args are always in correct order.
     The two blocks' PHI sequences are walked in lockstep: since SECOND
     is a copy of FIRST, corresponding PHIs are at the same position.  */

  for (psi2 = gsi_start_phis (second),
       psi1 = gsi_start_phis (first);
       !gsi_end_p (psi2) && !gsi_end_p (psi1);
       gsi_next (&psi2), gsi_next (&psi1))
    {
      phi1 = psi1.phi ();
      phi2 = psi2.phi ();
      /* Copy the argument SECOND's PHI received on the split edge onto
	 the new edge E into FIRST, preserving its source location.  */
      def = PHI_ARG_DEF (phi2, e2->dest_idx);
      add_phi_arg (phi1, def, e,
		   gimple_phi_arg_location_from_edge (phi2, e2));
    }
}
/* Adds an if-else statement to COND_BB with condition COND_E (a tree
   passed as void * to fit the CFG-hook signature).  SECOND_HEAD is the
   destination of the THEN part (reached via the pre-existing fallthru
   edge, re-flagged below) and FIRST_HEAD is the destination of the ELSE
   part (the caller is expected to add the EDGE_TRUE_VALUE edge).  */

static void
gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
			       basic_block second_head ATTRIBUTE_UNUSED,
			       basic_block cond_bb, void *cond_e)
{
  gimple_stmt_iterator gsi;
  gimple *new_cond_expr;
  tree cond_expr = (tree) cond_e;
  edge e0;

  /* Build new conditional expr */
  gsi = gsi_last_bb (cond_bb);

  /* Gimplify the condition into a form valid for a GIMPLE_COND,
     emitting any needed setup statements at the end of COND_BB.  */
  cond_expr = force_gimple_operand_gsi_1 (&gsi, cond_expr,
					  is_gimple_condexpr_for_cond,
					  NULL_TREE, false,
					  GSI_CONTINUE_LINKING);
  new_cond_expr = gimple_build_cond_from_tree (cond_expr,
					       NULL_TREE, NULL_TREE);

  /* Add new cond in cond_bb.  */
  gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);

  /* Adjust edges appropriately to connect new head with first head
     as well as second head.  The old fallthru edge becomes the
     FALSE edge of the new condition.  */
  e0 = single_succ_edge (cond_bb);
  e0->flags &= ~EDGE_FALLTHRU;
  e0->flags |= EDGE_FALSE_VALUE;
}
/* Do book-keeping of basic block BB for the profile consistency checker.
   Accumulate BB's estimated size, and its estimated time weighted by the
   block's execution count, into RECORD.  */
static void
gimple_account_profile_record (basic_block bb,
			       struct profile_record *record)
{
  gimple_stmt_iterator i;
  for (i = gsi_start_nondebug_after_labels_bb (bb); !gsi_end_p (i);
       gsi_next_nondebug (&i))
    {
      record->size
	+= estimate_num_insns (gsi_stmt (i), &eni_size_weights);
      if (profile_info)
	{
	  /* With feedback data, weight by the IPA (profile-feedback)
	     count, but only if both the function entry and this block
	     have usable counts.  */
	  if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.ipa ().initialized_p ()
	      && ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.ipa ().nonzero_p ()
	      && bb->count.ipa ().initialized_p ())
	    record->time
	      += estimate_num_insns (gsi_stmt (i),
				     &eni_time_weights)
		 * bb->count.ipa ().to_gcov_type ();
	}
      /* Without feedback, weight by BB's frequency relative to the
	 function entry when both counts are initialized.  */
      else if (bb->count.initialized_p ()
	       && ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.initialized_p ())
	record->time
	  += estimate_num_insns
	       (gsi_stmt (i),
		&eni_time_weights)
	     * bb->count.to_sreal_scale
		 (ENTRY_BLOCK_PTR_FOR_FN (cfun)->count).to_double ();
      /* No usable profile at all: count each insn once.  */
      else
	record->time
	  += estimate_num_insns (gsi_stmt (i), &eni_time_weights);
    }
}
/* Table of CFG hook implementations for the GIMPLE IR.  Registered via
   gimple_register_cfg_hooks; each entry supplies the GIMPLE-specific
   behavior for the generic CFG manipulation routines in cfghooks.cc.  */

struct cfg_hooks gimple_cfg_hooks = {
  "gimple",
  gimple_verify_flow_info,
  gimple_dump_bb,		/* dump_bb  */
  gimple_dump_bb_for_graph,	/* dump_bb_for_graph  */
  create_bb,			/* create_basic_block  */
  gimple_redirect_edge_and_branch, /* redirect_edge_and_branch  */
  gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force  */
  gimple_can_remove_branch_p,	/* can_remove_branch_p  */
  remove_bb,			/* delete_basic_block  */
  gimple_split_block,		/* split_block  */
  gimple_move_block_after,	/* move_block_after  */
  gimple_can_merge_blocks_p,	/* can_merge_blocks_p  */
  gimple_merge_blocks,		/* merge_blocks  */
  gimple_predict_edge,		/* predict_edge  */
  gimple_predicted_by_p,	/* predicted_by_p  */
  gimple_can_duplicate_bb_p,	/* can_duplicate_block_p  */
  gimple_duplicate_bb,		/* duplicate_block  */
  gimple_split_edge,		/* split_edge  */
  gimple_make_forwarder_block,	/* make_forward_block  */
  NULL,				/* tidy_fallthru_edge  */
  NULL,				/* force_nonfallthru  */
  gimple_block_ends_with_call_p,/* block_ends_with_call_p  */
  gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p  */
  gimple_flow_call_edges_add,	/* flow_call_edges_add  */
  gimple_execute_on_growing_pred,	/* execute_on_growing_pred  */
  gimple_execute_on_shrinking_pred,	/* execute_on_shrinking_pred  */
  gimple_duplicate_loop_body_to_header_edge, /* duplicate loop for trees  */
  gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb  */
  gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi  */
  extract_true_false_edges_from_block, /* extract_cond_bb_edges  */
  flush_pending_stmts,		/* flush_pending_stmts  */
  gimple_empty_block_p,		/* block_empty_p  */
  gimple_split_block_before_cond_jump, /* split_block_before_cond_jump  */
  gimple_account_profile_record,
};
/* Split all critical edges.  Split some extra (not necessarily critical)
   edges if FOR_EDGE_INSERTION_P is true.  Returns 0 (no TODO flags).  */

unsigned int
split_critical_edges (bool for_edge_insertion_p /* = false */)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
     expensive.  So we want to enable recording of edge to CASE_LABEL_EXPR
     mappings around the calls to split_edge.  */
  start_recording_case_labels ();
  FOR_ALL_BB_FN (bb, cfun)
    {
      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  /* Abnormal edges cannot be split at all.  */
	  if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
	    split_edge (e);
	  /* PRE inserts statements to edges and expects that
	     since split_critical_edges was done beforehand, committing edge
	     insertions will not split more edges.  In addition to critical
	     edges we must split edges that have multiple successors and
	     end by control flow statements, such as RESX.
	     Go ahead and split them too.  This matches the logic in
	     gimple_find_edge_insert_loc.  */
	  else if (for_edge_insertion_p
		   && (!single_pred_p (e->dest)
		       || !gimple_seq_empty_p (phi_nodes (e->dest))
		       || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
		   && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
		   && !(e->flags & EDGE_ABNORMAL))
	    {
	      gimple_stmt_iterator gsi;

	      gsi = gsi_last_bb (e->src);
	      if (!gsi_end_p (gsi)
		  && stmt_ends_bb_p (gsi_stmt (gsi))
		  && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
		      && !gimple_call_builtin_p (gsi_stmt (gsi),
						 BUILT_IN_RETURN)))
		split_edge (e);
	    }
	}
    }
  end_recording_case_labels ();
  return 0;
}
namespace {

/* Pass descriptor for the "crited" pass: splits all critical edges and
   advertises PROP_no_crit_edges so later passes can rely on it.  */
const pass_data pass_data_split_crit_edges =
{
  GIMPLE_PASS, /* type */
  "crited", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_SPLIT_EDGES, /* tv_id */
  PROP_cfg, /* properties_required */
  PROP_no_crit_edges, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Thin gimple_opt_pass wrapper around split_critical_edges.  */
class pass_split_crit_edges : public gimple_opt_pass
{
public:
  pass_split_crit_edges (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute (function *) final override
  {
    return split_critical_edges ();
  }

  opt_pass * clone () final override
  {
    return new pass_split_crit_edges (m_ctxt);
  }
}; // class pass_split_crit_edges

} // anon namespace
9478 gimple_opt_pass *
9479 make_pass_split_crit_edges (gcc::context *ctxt)
9481 return new pass_split_crit_edges (ctxt);
/* Insert COND expression which is GIMPLE_COND after STMT
   in basic block BB with appropriate basic block split
   and creation of a new conditionally executed basic block.
   Update profile so the new bb is visited with probability PROB.
   Return created basic block.  */
basic_block
insert_cond_bb (basic_block bb, gimple *stmt, gimple *cond,
		profile_probability prob)
{
  /* FALL is the edge from BB to the block holding everything after STMT.  */
  edge fall = split_block (bb, stmt);
  gimple_stmt_iterator iter = gsi_last_bb (bb);
  basic_block new_bb;

  /* Insert cond statement.  */
  gcc_assert (gimple_code (cond) == GIMPLE_COND);
  if (gsi_end_p (iter))
    gsi_insert_before (&iter, cond, GSI_CONTINUE_LINKING);
  else
    gsi_insert_after (&iter, cond, GSI_CONTINUE_LINKING);

  /* Create conditionally executed block.  */
  new_bb = create_empty_bb (bb);
  edge e = make_edge (bb, new_bb, EDGE_TRUE_VALUE);
  e->probability = prob;
  new_bb->count = e->count ();
  make_single_succ_edge (new_bb, fall->dest, EDGE_FALLTHRU);

  /* Fix edge for split bb: it is taken when COND is false, with the
     remaining probability mass.  */
  fall->flags = EDGE_FALSE_VALUE;
  fall->probability -= e->probability;

  /* Update dominance info.  Both NEW_BB and the join block are now
     immediately dominated by BB.  */
  if (dom_info_available_p (CDI_DOMINATORS))
    {
      set_immediate_dominator (CDI_DOMINATORS, new_bb, bb);
      set_immediate_dominator (CDI_DOMINATORS, fall->dest, bb);
    }

  /* Update loop info.  */
  if (current_loops)
    add_bb_to_loop (new_bb, bb->loop_father);

  return new_bb;
}
9532 /* Given a basic block B which ends with a conditional and has
9533 precisely two successors, determine which of the edges is taken if
9534 the conditional is true and which is taken if the conditional is
9535 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
9537 void
9538 extract_true_false_edges_from_block (basic_block b,
9539 edge *true_edge,
9540 edge *false_edge)
9542 edge e = EDGE_SUCC (b, 0);
9544 if (e->flags & EDGE_TRUE_VALUE)
9546 *true_edge = e;
9547 *false_edge = EDGE_SUCC (b, 1);
9549 else
9551 *false_edge = e;
9552 *true_edge = EDGE_SUCC (b, 1);
9557 /* From a controlling predicate in the immediate dominator DOM of
9558 PHIBLOCK determine the edges into PHIBLOCK that are chosen if the
9559 predicate evaluates to true and false and store them to
9560 *TRUE_CONTROLLED_EDGE and *FALSE_CONTROLLED_EDGE if
9561 they are non-NULL. Returns true if the edges can be determined,
9562 else return false. */
9564 bool
9565 extract_true_false_controlled_edges (basic_block dom, basic_block phiblock,
9566 edge *true_controlled_edge,
9567 edge *false_controlled_edge)
9569 basic_block bb = phiblock;
9570 edge true_edge, false_edge, tem;
9571 edge e0 = NULL, e1 = NULL;
9573 /* We have to verify that one edge into the PHI node is dominated
9574 by the true edge of the predicate block and the other edge
9575 dominated by the false edge. This ensures that the PHI argument
9576 we are going to take is completely determined by the path we
9577 take from the predicate block.
9578 We can only use BB dominance checks below if the destination of
9579 the true/false edges are dominated by their edge, thus only
9580 have a single predecessor. */
9581 extract_true_false_edges_from_block (dom, &true_edge, &false_edge);
9582 tem = EDGE_PRED (bb, 0);
9583 if (tem == true_edge
9584 || (single_pred_p (true_edge->dest)
9585 && (tem->src == true_edge->dest
9586 || dominated_by_p (CDI_DOMINATORS,
9587 tem->src, true_edge->dest))))
9588 e0 = tem;
9589 else if (tem == false_edge
9590 || (single_pred_p (false_edge->dest)
9591 && (tem->src == false_edge->dest
9592 || dominated_by_p (CDI_DOMINATORS,
9593 tem->src, false_edge->dest))))
9594 e1 = tem;
9595 else
9596 return false;
9597 tem = EDGE_PRED (bb, 1);
9598 if (tem == true_edge
9599 || (single_pred_p (true_edge->dest)
9600 && (tem->src == true_edge->dest
9601 || dominated_by_p (CDI_DOMINATORS,
9602 tem->src, true_edge->dest))))
9603 e0 = tem;
9604 else if (tem == false_edge
9605 || (single_pred_p (false_edge->dest)
9606 && (tem->src == false_edge->dest
9607 || dominated_by_p (CDI_DOMINATORS,
9608 tem->src, false_edge->dest))))
9609 e1 = tem;
9610 else
9611 return false;
9612 if (!e0 || !e1)
9613 return false;
9615 if (true_controlled_edge)
9616 *true_controlled_edge = e0;
9617 if (false_controlled_edge)
9618 *false_controlled_edge = e1;
9620 return true;
/* Generate a range test that determines whether INDEX is in the range
   [LOW, HIGH] using the canonical (INDEX - LOW) <= (HIGH - LOW) unsigned
   trick.  Store the operands of the resulting comparison into *LHS and
   *RHS; the supporting statements are placed before the last statement
   of BB.  */

void
generate_range_test (basic_block bb, tree index, tree low, tree high,
		     tree *lhs, tree *rhs)
{
  tree type = TREE_TYPE (index);
  /* Unsigned type wide enough that the subtraction cannot wrap into
     a false positive.  */
  tree utype = range_check_type (type);

  low = fold_convert (utype, low);
  high = fold_convert (utype, high);

  gimple_seq seq = NULL;
  index = gimple_convert (&seq, utype, index);
  *lhs = gimple_build (&seq, MINUS_EXPR, utype, index, low);
  /* HIGH - LOW is a constant, folded at compile time.  */
  *rhs = const_binop (MINUS_EXPR, utype, high, low);

  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);
}
9645 /* Return the basic block that belongs to label numbered INDEX
9646 of a switch statement. */
9648 basic_block
9649 gimple_switch_label_bb (function *ifun, gswitch *gs, unsigned index)
9651 return label_to_block (ifun, CASE_LABEL (gimple_switch_label (gs, index)));
9654 /* Return the default basic block of a switch statement. */
9656 basic_block
9657 gimple_switch_default_bb (function *ifun, gswitch *gs)
9659 return gimple_switch_label_bb (ifun, gs, 0);
9662 /* Return the edge that belongs to label numbered INDEX
9663 of a switch statement. */
9665 edge
9666 gimple_switch_edge (function *ifun, gswitch *gs, unsigned index)
9668 return find_edge (gimple_bb (gs), gimple_switch_label_bb (ifun, gs, index));
9671 /* Return the default edge of a switch statement. */
9673 edge
9674 gimple_switch_default_edge (function *ifun, gswitch *gs)
9676 return gimple_switch_edge (ifun, gs, 0);
9679 /* Return true if the only executable statement in BB is a GIMPLE_COND. */
9681 bool
9682 cond_only_block_p (basic_block bb)
9684 /* BB must have no executable statements. */
9685 gimple_stmt_iterator gsi = gsi_after_labels (bb);
9686 if (phi_nodes (bb))
9687 return false;
9688 while (!gsi_end_p (gsi))
9690 gimple *stmt = gsi_stmt (gsi);
9691 if (is_gimple_debug (stmt))
9693 else if (gimple_code (stmt) == GIMPLE_NOP
9694 || gimple_code (stmt) == GIMPLE_PREDICT
9695 || gimple_code (stmt) == GIMPLE_COND)
9697 else
9698 return false;
9699 gsi_next (&gsi);
9701 return true;
/* Emit return warnings.  */

namespace {

/* Pass descriptor for the internal pass that warns about suspicious
   returns: falling off the end of a non-void function, or returning
   from a noreturn function.  */
const pass_data pass_data_warn_function_return =
{
  GIMPLE_PASS, /* type */
  "*warn_function_return", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_warn_function_return : public gimple_opt_pass
{
public:
  pass_warn_function_return (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_function_return, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute (function *) final override;

}; // class pass_warn_function_return
/* Main driver: warn when a noreturn function returns, or when control
   reaches the end of a non-void function without a value.  When
   optimizing, also replaces returns in noreturn functions with
   __builtin_unreachable ().  */

unsigned int
pass_warn_function_return::execute (function *fun)
{
  location_t location;
  gimple *last;
  edge e;
  edge_iterator ei;

  if (!targetm.warn_func_return (fun->decl))
    return 0;

  /* If we have a path to EXIT, then we do return.
     TREE_THIS_VOLATILE on the fndecl means "noreturn".  */
  if (TREE_THIS_VOLATILE (fun->decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
    {
      location = UNKNOWN_LOCATION;
      for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (fun)->preds);
	   (e = ei_safe_edge (ei)); )
	{
	  last = *gsi_last_bb (e->src);
	  /* Remember the first return with a usable location; when not
	     optimizing we can stop scanning as soon as we have one.  */
	  if ((gimple_code (last) == GIMPLE_RETURN
	       || gimple_call_builtin_p (last, BUILT_IN_RETURN))
	      && location == UNKNOWN_LOCATION
	      && ((location = LOCATION_LOCUS (gimple_location (last)))
		  != UNKNOWN_LOCATION)
	      && !optimize)
	    break;
	  /* When optimizing, replace return stmts in noreturn functions
	     with __builtin_unreachable () call.  */
	  if (optimize && gimple_code (last) == GIMPLE_RETURN)
	    {
	      location_t loc = gimple_location (last);
	      gimple *new_stmt = gimple_build_builtin_unreachable (loc);
	      gimple_stmt_iterator gsi = gsi_for_stmt (last);
	      gsi_replace (&gsi, new_stmt, true);
	      /* Removing E shifts the pred vector, so do not advance
		 the iterator here.  */
	      remove_edge (e);
	    }
	  else
	    ei_next (&ei);
	}
      if (location == UNKNOWN_LOCATION)
	location = cfun->function_end_locus;
      warning_at (location, 0, "%<noreturn%> function does return");
    }

  /* If we see "return;" in some basic block, then we do reach the end
     without returning a value.  */
  else if (warn_return_type > 0
	   && !warning_suppressed_p (fun->decl, OPT_Wreturn_type)
	   && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
    {
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
	{
	  greturn *return_stmt = dyn_cast <greturn *> (*gsi_last_bb (e->src));
	  if (return_stmt
	      && gimple_return_retval (return_stmt) == NULL
	      && !warning_suppressed_p (return_stmt, OPT_Wreturn_type))
	    {
	      location = gimple_location (return_stmt);
	      if (LOCATION_LOCUS (location) == UNKNOWN_LOCATION)
		location = fun->function_end_locus;
	      if (warning_at (location, OPT_Wreturn_type,
			      "control reaches end of non-void function"))
		suppress_warning (fun->decl, OPT_Wreturn_type);
	      break;
	    }
	}
      /* The C++ FE turns fallthrough from the end of non-void function
	 into __builtin_unreachable () call with BUILTINS_LOCATION.
	 Recognize those as well as calls from ubsan_instrument_return.  */
      basic_block bb;
      if (!warning_suppressed_p (fun->decl, OPT_Wreturn_type))
	FOR_EACH_BB_FN (bb, fun)
	  if (EDGE_COUNT (bb->succs) == 0)
	    {
	      gimple *last = *gsi_last_bb (bb);
	      const enum built_in_function ubsan_missing_ret
		= BUILT_IN_UBSAN_HANDLE_MISSING_RETURN;
	      if (last
		  && ((LOCATION_LOCUS (gimple_location (last))
		       == BUILTINS_LOCATION
		       && (gimple_call_builtin_p (last, BUILT_IN_UNREACHABLE)
			   || gimple_call_builtin_p (last,
						     BUILT_IN_UNREACHABLE_TRAP)
			   || gimple_call_builtin_p (last, BUILT_IN_TRAP)))
		      || gimple_call_builtin_p (last, ubsan_missing_ret)))
		{
		  /* Point the warning at the statement preceding the
		     synthesized trap/unreachable call, if any.  */
		  gimple_stmt_iterator gsi = gsi_for_stmt (last);
		  gsi_prev_nondebug (&gsi);
		  gimple *prev = gsi_stmt (gsi);
		  if (prev == NULL)
		    location = UNKNOWN_LOCATION;
		  else
		    location = gimple_location (prev);
		  if (LOCATION_LOCUS (location) == UNKNOWN_LOCATION)
		    location = fun->function_end_locus;
		  if (warning_at (location, OPT_Wreturn_type,
				  "control reaches end of non-void function"))
		    suppress_warning (fun->decl, OPT_Wreturn_type);
		  break;
		}
	    }
    }
  return 0;
}

} // anon namespace
9842 gimple_opt_pass *
9843 make_pass_warn_function_return (gcc::context *ctxt)
9845 return new pass_warn_function_return (ctxt);
/* Walk a gimplified function and warn for functions whose return value is
   ignored and attribute((warn_unused_result)) is set.  This is done before
   inlining, so we don't have to worry about that.  Recurses into the
   bodies of container statements (binds, trys, catches, EH filters).  */

static void
do_warn_unused_result (gimple_seq seq)
{
  tree fdecl, ftype;
  gimple_stmt_iterator i;

  for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
    {
      gimple *g = gsi_stmt (i);

      switch (gimple_code (g))
	{
	case GIMPLE_BIND:
	  do_warn_unused_result (gimple_bind_body (as_a <gbind *>(g)));
	  break;
	case GIMPLE_TRY:
	  do_warn_unused_result (gimple_try_eval (g));
	  do_warn_unused_result (gimple_try_cleanup (g));
	  break;
	case GIMPLE_CATCH:
	  do_warn_unused_result (gimple_catch_handler (
				   as_a <gcatch *> (g)));
	  break;
	case GIMPLE_EH_FILTER:
	  do_warn_unused_result (gimple_eh_filter_failure (g));
	  break;

	case GIMPLE_CALL:
	  if (gimple_call_lhs (g))
	    break;
	  if (gimple_call_internal_p (g))
	    break;

	  /* This is a naked call, as opposed to a GIMPLE_CALL with an
	     LHS.  All calls whose value is ignored should be
	     represented like this.  Look for the attribute.  */
	  fdecl = gimple_call_fndecl (g);
	  ftype = gimple_call_fntype (g);

	  if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
	    {
	      location_t loc = gimple_location (g);

	      if (fdecl)
		warning_at (loc, OPT_Wunused_result,
			    "ignoring return value of %qD "
			    "declared with attribute %<warn_unused_result%>",
			    fdecl);
	      else
		warning_at (loc, OPT_Wunused_result,
			    "ignoring return value of function "
			    "declared with attribute %<warn_unused_result%>");
	    }
	  break;

	default:
	  /* Not a container, not a call, or a call whose value is used.  */
	  break;
	}
    }
}
namespace {

/* Pass descriptor for the internal pass implementing -Wunused-result
   on the gimplified (pre-CFG) function body.  */
const pass_data pass_data_warn_unused_result =
{
  GIMPLE_PASS, /* type */
  "*warn_unused_result", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_warn_unused_result : public gimple_opt_pass
{
public:
  pass_warn_unused_result (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
  {}

  /* opt_pass methods: */
  bool gate (function *) final override { return flag_warn_unused_result; }
  unsigned int execute (function *) final override
  {
    do_warn_unused_result (gimple_body (current_function_decl));
    return 0;
  }

}; // class pass_warn_unused_result

} // anon namespace
9948 gimple_opt_pass *
9949 make_pass_warn_unused_result (gcc::context *ctxt)
9951 return new pass_warn_unused_result (ctxt);
/* Maybe remove stores to variables we marked write-only.  GSI/STMT is
   the candidate statement; SSA names made dead by the removal are added
   to DCE_SSA_NAMES for a later simple-DCE pass.  Return true if the
   store was removed (GSI then points past the removed statement).  */
static bool
maybe_remove_writeonly_store (gimple_stmt_iterator &gsi, gimple *stmt,
			      bitmap dce_ssa_names)
{
  /* Keep access when store has side effect, i.e. in case when source
     is volatile.  */
  if (!gimple_store_p (stmt)
      || gimple_has_side_effects (stmt)
      || optimize_debug)
    return false;

  tree lhs = get_base_address (gimple_get_lhs (stmt));

  /* Only global/static variables are tracked in the varpool, and only
     those the IPA analysis proved write-only qualify.  */
  if (!VAR_P (lhs)
      || (!TREE_STATIC (lhs) && !DECL_EXTERNAL (lhs))
      || !varpool_node::get (lhs)->writeonly)
    return false;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Removing statement, writes"
	       " to write only var:\n");
      print_gimple_stmt (dump_file, stmt, 0,
			 TDF_VOPS|TDF_MEMSYMS);
    }

  /* Mark ssa name defining to be checked for simple dce.  */
  if (gimple_assign_single_p (stmt))
    {
      tree rhs = gimple_assign_rhs1 (stmt);
      if (TREE_CODE (rhs) == SSA_NAME
	  && !SSA_NAME_IS_DEFAULT_DEF (rhs))
	bitmap_set_bit (dce_ssa_names, SSA_NAME_VERSION (rhs));
    }
  unlink_stmt_vdef (stmt);
  gsi_remove (&gsi, true);
  release_defs (stmt);
  return true;
}
/* IPA passes, compilation of earlier functions or inlining
   might have changed some properties, such as marked functions nothrow,
   pure, const or noreturn.
   Remove redundant edges and basic blocks, and create new ones if
   necessary.  Also rescales the profile when the IPA node count and the
   entry block count disagree.  Returns TODO_* flags for the pass
   manager.  */

unsigned int
execute_fixup_cfg (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  int todo = 0;
  cgraph_node *node = cgraph_node::get (current_function_decl);
  /* Same scaling is also done by ipa_merge_profiles.  */
  profile_count num = node->count;
  profile_count den = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
  bool scale = num.initialized_p () && !(num == den);
  auto_bitmap dce_ssa_names;

  if (scale)
    {
      profile_count::adjust_for_ipa_scaling (&num, &den);
      ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = node->count;
      EXIT_BLOCK_PTR_FOR_FN (cfun)->count
	= EXIT_BLOCK_PTR_FOR_FN (cfun)->count.apply_scale (num, den);
    }

  FOR_EACH_BB_FN (bb, cfun)
    {
      if (scale)
	bb->count = bb->count.apply_scale (num, den);
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
	{
	  gimple *stmt = gsi_stmt (gsi);
	  tree decl = is_gimple_call (stmt)
		      ? gimple_call_fndecl (stmt)
		      : NULL;
	  if (decl)
	    {
	      int flags = gimple_call_flags (stmt);
	      /* A call newly discovered const/pure may have lost its
		 VDEF; re-scan operands and request SSA/CFG cleanup.  */
	      if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
		{
		  if (gimple_in_ssa_p (cfun))
		    {
		      todo |= TODO_update_ssa | TODO_cleanup_cfg;
		      update_stmt (stmt);
		    }
		}
	      if (flags & ECF_NORETURN
		  && fixup_noreturn_call (stmt))
		todo |= TODO_cleanup_cfg;
	    }

	  /* Remove stores to variables we marked write-only.  */
	  if (maybe_remove_writeonly_store (gsi, stmt, dce_ssa_names))
	    {
	      /* GSI already advanced past the removed statement.  */
	      todo |= TODO_update_ssa | TODO_cleanup_cfg;
	      continue;
	    }

	  /* For calls we can simply remove LHS when it is known
	     to be write-only.  */
	  if (is_gimple_call (stmt)
	      && gimple_get_lhs (stmt))
	    {
	      tree lhs = get_base_address (gimple_get_lhs (stmt));

	      if (VAR_P (lhs)
		  && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
		  && varpool_node::get (lhs)->writeonly)
		{
		  gimple_call_set_lhs (stmt, NULL);
		  update_stmt (stmt);
		  todo |= TODO_update_ssa | TODO_cleanup_cfg;
		}
	    }

	  gsi_next (&gsi);
	}
      if (gimple *last = *gsi_last_bb (bb))
	{
	  if (maybe_clean_eh_stmt (last)
	      && gimple_purge_dead_eh_edges (bb))
	    todo |= TODO_cleanup_cfg;
	  if (gimple_purge_dead_abnormal_call_edges (bb))
	    todo |= TODO_cleanup_cfg;
	}

      /* If we have a basic block with no successors that does not
	 end with a control statement or a noreturn call end it with
	 a call to __builtin_unreachable.  This situation can occur
	 when inlining a noreturn call that does in fact return.  */
      if (EDGE_COUNT (bb->succs) == 0)
	{
	  gimple *stmt = last_nondebug_stmt (bb);
	  if (!stmt
	      || (!is_ctrl_stmt (stmt)
		  && (!is_gimple_call (stmt)
		      || !gimple_call_noreturn_p (stmt))))
	    {
	      if (stmt && is_gimple_call (stmt))
		gimple_call_set_ctrl_altering (stmt, false);
	      stmt = gimple_build_builtin_unreachable (UNKNOWN_LOCATION);
	      gimple_stmt_iterator gsi = gsi_last_bb (bb);
	      gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
	      /* Before inlining the callgraph must stay in sync with
		 the statements, so record the new call edge.  */
	      if (!cfun->after_inlining)
		if (tree fndecl = gimple_call_fndecl (stmt))
		  {
		    gcall *call_stmt = dyn_cast <gcall *> (stmt);
		    node->create_edge (cgraph_node::get_create (fndecl),
				       call_stmt, bb->count);
		  }
	    }
	}
    }
  if (scale)
    {
      update_max_bb_count ();
      compute_function_frequency ();
    }

  if (current_loops
      && (todo & TODO_cleanup_cfg))
    loops_state_set (LOOPS_NEED_FIXUP);

  /* Clean up SSA names whose only use was a removed write-only store.  */
  simple_dce_from_worklist (dce_ssa_names);

  return todo;
}
namespace {

/* Pass descriptor for the "fixup_cfg" pass, a thin wrapper around
   execute_fixup_cfg run at several points in the pipeline.  */
const pass_data pass_data_fixup_cfg =
{
  GIMPLE_PASS, /* type */
  "fixup_cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_fixup_cfg : public gimple_opt_pass
{
public:
  pass_fixup_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () final override { return new pass_fixup_cfg (m_ctxt); }
  unsigned int execute (function *) final override
  {
    return execute_fixup_cfg ();
  }

}; // class pass_fixup_cfg

} // anon namespace
10158 gimple_opt_pass *
10159 make_pass_fixup_cfg (gcc::context *ctxt)
10161 return new pass_fixup_cfg (ctxt);
/* Garbage collection support for edge_def.  */

extern void gt_ggc_mx (tree&);
extern void gt_ggc_mx (gimple *&);
extern void gt_ggc_mx (rtx&);
extern void gt_ggc_mx (basic_block&);

/* Mark an rtx_insn chain reachable from an edge (RTL IR only).  */
static void
gt_ggc_mx (rtx_insn *& x)
{
  if (x)
    gt_ggc_mx_rtx_def ((void *) x);
}

/* GC-mark everything reachable from edge E: source and destination
   blocks, pending statements on the edge (GIMPLE or RTL depending on
   the current IR), and the BLOCK of the goto locus.  */
void
gt_ggc_mx (edge_def *e)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  gt_ggc_mx (e->src);
  gt_ggc_mx (e->dest);
  if (current_ir_type () == IR_GIMPLE)
    gt_ggc_mx (e->insns.g);
  else
    gt_ggc_mx (e->insns.r);
  gt_ggc_mx (block);
}
/* PCH support for edge_def.  */

extern void gt_pch_nx (tree&);
extern void gt_pch_nx (gimple *&);
extern void gt_pch_nx (rtx&);
extern void gt_pch_nx (basic_block&);

/* Note an rtx_insn chain reachable from an edge for PCH (RTL IR only).  */
static void
gt_pch_nx (rtx_insn *& x)
{
  if (x)
    gt_pch_nx_rtx_def ((void *) x);
}

/* Note for PCH everything reachable from edge E: source and destination
   blocks, pending statements on the edge (GIMPLE or RTL depending on
   the current IR), and the BLOCK of the goto locus.  */
void
gt_pch_nx (edge_def *e)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  gt_pch_nx (e->src);
  gt_pch_nx (e->dest);
  if (current_ir_type () == IR_GIMPLE)
    gt_pch_nx (e->insns.g);
  else
    gt_pch_nx (e->insns.r);
  gt_pch_nx (block);
}
/* Apply pointer operator OP (with COOKIE) to every GC pointer inside
   edge E, for PCH pointer relocation.  */
void
gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  op (&(e->src), NULL, cookie);
  op (&(e->dest), NULL, cookie);
  if (current_ir_type () == IR_GIMPLE)
    op (&(e->insns.g), NULL, cookie);
  else
    op (&(e->insns.r), NULL, cookie);
  /* BLOCK is a local copy; the operator may update it in place.  */
  op (&(block), &(block), cookie);
}
#if CHECKING_P

namespace selftest {

/* Helper function for CFG selftests: create a dummy function decl
   returning int, push it as cfun, and initialize an empty CFG for it
   (just ENTRY and EXIT, no edges).  Returns the new fndecl.  */

static tree
push_fndecl (const char *name)
{
  tree fn_type = build_function_type_array (integer_type_node, 0, NULL);
  /* FIXME: this uses input_location: */
  tree fndecl = build_fn_decl (name, fn_type);
  tree retval = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
			    NULL_TREE, integer_type_node);
  DECL_RESULT (fndecl) = retval;
  push_struct_function (fndecl);
  function *fun = DECL_STRUCT_FUNCTION (fndecl);
  ASSERT_TRUE (fun != NULL);
  init_empty_tree_cfg_for_function (fun);
  /* A fresh CFG has exactly the ENTRY and EXIT blocks and no edges.  */
  ASSERT_EQ (2, n_basic_blocks_for_fn (fun));
  ASSERT_EQ (0, n_edges_for_fn (fun));
  return fndecl;
}
10256 /* These tests directly create CFGs.
10257 Compare with the static fns within tree-cfg.cc:
10258 - build_gimple_cfg
10259 - make_blocks: calls create_basic_block (seq, bb);
10260 - make_edges. */
10262 /* Verify a simple cfg of the form:
10263 ENTRY -> A -> B -> C -> EXIT. */
10265 static void
10266 test_linear_chain ()
10268 gimple_register_cfg_hooks ();
10270 tree fndecl = push_fndecl ("cfg_test_linear_chain");
10271 function *fun = DECL_STRUCT_FUNCTION (fndecl);
10273 /* Create some empty blocks. */
10274 basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
10275 basic_block bb_b = create_empty_bb (bb_a);
10276 basic_block bb_c = create_empty_bb (bb_b);
10278 ASSERT_EQ (5, n_basic_blocks_for_fn (fun));
10279 ASSERT_EQ (0, n_edges_for_fn (fun));
10281 /* Create some edges: a simple linear chain of BBs. */
10282 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
10283 make_edge (bb_a, bb_b, 0);
10284 make_edge (bb_b, bb_c, 0);
10285 make_edge (bb_c, EXIT_BLOCK_PTR_FOR_FN (fun), 0);
10287 /* Verify the edges. */
10288 ASSERT_EQ (4, n_edges_for_fn (fun));
10289 ASSERT_EQ (NULL, ENTRY_BLOCK_PTR_FOR_FN (fun)->preds);
10290 ASSERT_EQ (1, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs->length ());
10291 ASSERT_EQ (1, bb_a->preds->length ());
10292 ASSERT_EQ (1, bb_a->succs->length ());
10293 ASSERT_EQ (1, bb_b->preds->length ());
10294 ASSERT_EQ (1, bb_b->succs->length ());
10295 ASSERT_EQ (1, bb_c->preds->length ());
10296 ASSERT_EQ (1, bb_c->succs->length ());
10297 ASSERT_EQ (1, EXIT_BLOCK_PTR_FOR_FN (fun)->preds->length ());
10298 ASSERT_EQ (NULL, EXIT_BLOCK_PTR_FOR_FN (fun)->succs);
10300 /* Verify the dominance information
10301 Each BB in our simple chain should be dominated by the one before
10302 it. */
10303 calculate_dominance_info (CDI_DOMINATORS);
10304 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
10305 ASSERT_EQ (bb_b, get_immediate_dominator (CDI_DOMINATORS, bb_c));
10306 auto_vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
10307 ASSERT_EQ (1, dom_by_b.length ());
10308 ASSERT_EQ (bb_c, dom_by_b[0]);
10309 free_dominance_info (CDI_DOMINATORS);
10311 /* Similarly for post-dominance: each BB in our chain is post-dominated
10312 by the one after it. */
10313 calculate_dominance_info (CDI_POST_DOMINATORS);
10314 ASSERT_EQ (bb_b, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
10315 ASSERT_EQ (bb_c, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
10316 auto_vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
10317 ASSERT_EQ (1, postdom_by_b.length ());
10318 ASSERT_EQ (bb_a, postdom_by_b[0]);
10319 free_dominance_info (CDI_POST_DOMINATORS);
10321 pop_cfun ();
10324 /* Verify a simple CFG of the form:
10325 ENTRY
10329 /t \f
10335 EXIT. */
10337 static void
10338 test_diamond ()
10340 gimple_register_cfg_hooks ();
10342 tree fndecl = push_fndecl ("cfg_test_diamond");
10343 function *fun = DECL_STRUCT_FUNCTION (fndecl);
10345 /* Create some empty blocks. */
10346 basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
10347 basic_block bb_b = create_empty_bb (bb_a);
10348 basic_block bb_c = create_empty_bb (bb_a);
10349 basic_block bb_d = create_empty_bb (bb_b);
10351 ASSERT_EQ (6, n_basic_blocks_for_fn (fun));
10352 ASSERT_EQ (0, n_edges_for_fn (fun));
10354 /* Create the edges. */
10355 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
10356 make_edge (bb_a, bb_b, EDGE_TRUE_VALUE);
10357 make_edge (bb_a, bb_c, EDGE_FALSE_VALUE);
10358 make_edge (bb_b, bb_d, 0);
10359 make_edge (bb_c, bb_d, 0);
10360 make_edge (bb_d, EXIT_BLOCK_PTR_FOR_FN (fun), 0);
10362 /* Verify the edges. */
10363 ASSERT_EQ (6, n_edges_for_fn (fun));
10364 ASSERT_EQ (1, bb_a->preds->length ());
10365 ASSERT_EQ (2, bb_a->succs->length ());
10366 ASSERT_EQ (1, bb_b->preds->length ());
10367 ASSERT_EQ (1, bb_b->succs->length ());
10368 ASSERT_EQ (1, bb_c->preds->length ());
10369 ASSERT_EQ (1, bb_c->succs->length ());
10370 ASSERT_EQ (2, bb_d->preds->length ());
10371 ASSERT_EQ (1, bb_d->succs->length ());
10373 /* Verify the dominance information. */
10374 calculate_dominance_info (CDI_DOMINATORS);
10375 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
10376 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_c));
10377 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_d));
10378 auto_vec<basic_block> dom_by_a = get_dominated_by (CDI_DOMINATORS, bb_a);
10379 ASSERT_EQ (3, dom_by_a.length ()); /* B, C, D, in some order. */
10380 dom_by_a.release ();
10381 auto_vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
10382 ASSERT_EQ (0, dom_by_b.length ());
10383 dom_by_b.release ();
10384 free_dominance_info (CDI_DOMINATORS);
10386 /* Similarly for post-dominance. */
10387 calculate_dominance_info (CDI_POST_DOMINATORS);
10388 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
10389 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
10390 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_c));
10391 auto_vec<basic_block> postdom_by_d = get_dominated_by (CDI_POST_DOMINATORS, bb_d);
10392 ASSERT_EQ (3, postdom_by_d.length ()); /* A, B, C in some order. */
10393 postdom_by_d.release ();
10394 auto_vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
10395 ASSERT_EQ (0, postdom_by_b.length ());
10396 postdom_by_b.release ();
10397 free_dominance_info (CDI_POST_DOMINATORS);
10399 pop_cfun ();
10402 /* Verify that we can handle a CFG containing a "complete" aka
10403 fully-connected subgraph (where A B C D below all have edges
10404 pointing to each other node, also to themselves).
10405 e.g.:
10406 ENTRY EXIT
10412 A<--->B
10413 ^^ ^^
10414 | \ / |
10415 | X |
10416 | / \ |
10417 VV VV
10418 C<--->D
10421 static void
10422 test_fully_connected ()
10424 gimple_register_cfg_hooks ();
10426 tree fndecl = push_fndecl ("cfg_fully_connected");
10427 function *fun = DECL_STRUCT_FUNCTION (fndecl);
10429 const int n = 4;
10431 /* Create some empty blocks. */
10432 auto_vec <basic_block> subgraph_nodes;
10433 for (int i = 0; i < n; i++)
10434 subgraph_nodes.safe_push (create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun)));
10436 ASSERT_EQ (n + 2, n_basic_blocks_for_fn (fun));
10437 ASSERT_EQ (0, n_edges_for_fn (fun));
10439 /* Create the edges. */
10440 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), subgraph_nodes[0], EDGE_FALLTHRU);
10441 make_edge (subgraph_nodes[0], EXIT_BLOCK_PTR_FOR_FN (fun), 0);
10442 for (int i = 0; i < n; i++)
10443 for (int j = 0; j < n; j++)
10444 make_edge (subgraph_nodes[i], subgraph_nodes[j], 0);
10446 /* Verify the edges. */
10447 ASSERT_EQ (2 + (n * n), n_edges_for_fn (fun));
10448 /* The first one is linked to ENTRY/EXIT as well as itself and
10449 everything else. */
10450 ASSERT_EQ (n + 1, subgraph_nodes[0]->preds->length ());
10451 ASSERT_EQ (n + 1, subgraph_nodes[0]->succs->length ());
10452 /* The other ones in the subgraph are linked to everything in
10453 the subgraph (including themselves). */
10454 for (int i = 1; i < n; i++)
10456 ASSERT_EQ (n, subgraph_nodes[i]->preds->length ());
10457 ASSERT_EQ (n, subgraph_nodes[i]->succs->length ());
10460 /* Verify the dominance information. */
10461 calculate_dominance_info (CDI_DOMINATORS);
10462 /* The initial block in the subgraph should be dominated by ENTRY. */
10463 ASSERT_EQ (ENTRY_BLOCK_PTR_FOR_FN (fun),
10464 get_immediate_dominator (CDI_DOMINATORS,
10465 subgraph_nodes[0]));
10466 /* Every other block in the subgraph should be dominated by the
10467 initial block. */
10468 for (int i = 1; i < n; i++)
10469 ASSERT_EQ (subgraph_nodes[0],
10470 get_immediate_dominator (CDI_DOMINATORS,
10471 subgraph_nodes[i]));
10472 free_dominance_info (CDI_DOMINATORS);
10474 /* Similarly for post-dominance. */
10475 calculate_dominance_info (CDI_POST_DOMINATORS);
10476 /* The initial block in the subgraph should be postdominated by EXIT. */
10477 ASSERT_EQ (EXIT_BLOCK_PTR_FOR_FN (fun),
10478 get_immediate_dominator (CDI_POST_DOMINATORS,
10479 subgraph_nodes[0]));
10480 /* Every other block in the subgraph should be postdominated by the
10481 initial block, since that leads to EXIT. */
10482 for (int i = 1; i < n; i++)
10483 ASSERT_EQ (subgraph_nodes[0],
10484 get_immediate_dominator (CDI_POST_DOMINATORS,
10485 subgraph_nodes[i]));
10486 free_dominance_info (CDI_POST_DOMINATORS);
10488 pop_cfun ();
10491 /* Run all of the selftests within this file. */
10493 void
10494 tree_cfg_cc_tests ()
10496 test_linear_chain ();
10497 test_diamond ();
10498 test_fully_connected ();
10501 } // namespace selftest
10503 /* TODO: test the dominator/postdominator logic with various graphs/nodes:
10504 - loop
10505 - nested loops
10506 - switch statement (a block with many out-edges)
10507 - something that jumps to itself
10508 - etc */
10510 #endif /* CHECKING_P */